summaryrefslogtreecommitdiff
path: root/dbtests
diff options
context:
space:
mode:
Diffstat (limited to 'dbtests')
-rw-r--r--dbtests/background_job_test.cpp109
-rw-r--r--dbtests/balancer_policy_tests.cpp203
-rw-r--r--dbtests/basictests.cpp277
-rw-r--r--dbtests/btreetests.cpp1412
-rw-r--r--dbtests/clienttests.cpp77
-rw-r--r--dbtests/commandtests.cpp18
-rw-r--r--dbtests/cursortests.cpp33
-rw-r--r--dbtests/d_chunk_manager_tests.cpp467
-rw-r--r--dbtests/dbtests.cpp4
-rw-r--r--dbtests/directclienttests.cpp80
-rw-r--r--dbtests/framework.cpp142
-rw-r--r--dbtests/framework.h52
-rw-r--r--dbtests/histogram_test.cpp20
-rw-r--r--dbtests/jsobjtests.cpp370
-rw-r--r--dbtests/jsontests.cpp74
-rw-r--r--dbtests/jstests.cpp363
-rw-r--r--dbtests/matchertests.cpp66
-rw-r--r--dbtests/mmaptests.cpp219
-rw-r--r--dbtests/mockdbclient.h4
-rw-r--r--dbtests/namespacetests.cpp56
-rw-r--r--dbtests/pairingtests.cpp24
-rw-r--r--dbtests/pdfiletests.cpp131
-rw-r--r--dbtests/perf/btreeperf.cpp442
-rw-r--r--dbtests/perf/perftest.cpp88
-rw-r--r--dbtests/perftests.cpp336
-rw-r--r--dbtests/queryoptimizertests.cpp555
-rw-r--r--dbtests/querytests.cpp302
-rw-r--r--dbtests/repltests.cpp411
-rw-r--r--dbtests/sharding.cpp12
-rw-r--r--dbtests/socktests.cpp13
-rw-r--r--dbtests/spin_lock_test.cpp68
-rw-r--r--dbtests/test.vcproj1453
-rw-r--r--dbtests/test.vcxproj57
-rwxr-xr-xdbtests/test.vcxproj.filters141
-rw-r--r--dbtests/threadedtests.cpp154
-rw-r--r--dbtests/updatetests.cpp195
36 files changed, 5466 insertions, 2962 deletions
diff --git a/dbtests/background_job_test.cpp b/dbtests/background_job_test.cpp
new file mode 100644
index 0000000..f2bf7d8
--- /dev/null
+++ b/dbtests/background_job_test.cpp
@@ -0,0 +1,109 @@
+// @file background_job_test.cpp
+
+/**
+ * Copyright (C) 2010 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "../pch.h"
+#include <boost/thread/thread.hpp>
+
+#include "dbtests.h"
+#include "../util/time_support.h"
+#include "../util/background.h"
+
+namespace BackgroundJobTests {
+
+ // a global variable that can be accessed independent of the IncTester object below
+ // IncTester keeps it up-to-date
+ int GLOBAL_val;
+
+ class IncTester : public mongo::BackgroundJob {
+ public:
+ explicit IncTester( long long millis , bool selfDelete = false )
+ : BackgroundJob(selfDelete), _val(0), _millis(millis) { GLOBAL_val = 0; }
+
+ void waitAndInc( long long millis ) {
+ if ( millis )
+ mongo::sleepmillis( millis );
+ ++_val;
+ ++GLOBAL_val;
+ }
+
+ int getVal() { return _val; }
+
+ /* --- BackgroundJob virtuals --- */
+
+ string name() const { return "IncTester"; }
+
+ void run() { waitAndInc( _millis ); }
+
+ private:
+ int _val;
+ long long _millis;
+ };
+
+
+ class NormalCase {
+ public:
+ void run() {
+ IncTester tester( 0 /* inc without wait */ );
+ tester.go();
+ ASSERT( tester.wait() );
+ ASSERT_EQUALS( tester.getVal() , 1 );
+ }
+ };
+
+ class TimeOutCase {
+ public:
+ void run() {
+ IncTester tester( 1000 /* wait 1sec before inc-ing */ );
+ tester.go();
+ ASSERT( ! tester.wait( 100 /* ms */ ) ); // should time out
+ ASSERT_EQUALS( tester.getVal() , 0 );
+
+ // if we wait longer than the IncTester, we should see the increment
+ ASSERT( tester.wait( 1500 /* ms */ ) ); // should not time out
+ ASSERT_EQUALS( tester.getVal() , 1 );
+ }
+ };
+
+ class SelfDeletingCase {
+ public:
+ void run() {
+ BackgroundJob* j = new IncTester( 0 /* inc without wait */ , true /* self delete */ );
+ j->go();
+
+
+ // the background thread should have continued running and this test should pass the
+ // heap-checker as well
+ mongo::sleepmillis( 1000 );
+ ASSERT_EQUALS( GLOBAL_val, 1 );
+ }
+ };
+
+
+ class BackgroundJobSuite : public Suite {
+ public:
+ BackgroundJobSuite() : Suite( "background_job" ) {}
+
+ void setupTests() {
+ add< NormalCase >();
+ add< TimeOutCase >();
+ add< SelfDeletingCase >();
+ }
+
+ } backgroundJobSuite;
+
+} // namespace BackgroundJobTests
diff --git a/dbtests/balancer_policy_tests.cpp b/dbtests/balancer_policy_tests.cpp
new file mode 100644
index 0000000..6f7c4a5
--- /dev/null
+++ b/dbtests/balancer_policy_tests.cpp
@@ -0,0 +1,203 @@
+// @file balancer_policy_tests.cpp
+
+/**
+ * Copyright (C) 2010 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "dbtests.h"
+
+// TODO SERVER-1822
+//#include "../s/config.h" // for ShardFields
+//#include "../s/balancer_policy.h"
+
+namespace BalancerPolicyTests {
+
+//
+// TODO SERVER-1822
+//
+#if 0
+
+    typedef mongo::ShardFields sf;  // fields from 'shards' collection
+ typedef mongo::LimitsFields lf; // fields from the balancer's limits map
+
+ class SizeMaxedShardTest {
+ public:
+ void run() {
+ BSONObj shard0 = BSON( sf::maxSize(0LL) << lf::currSize(0LL) );
+ ASSERT( ! BalancerPolicy::isSizeMaxed( shard0 ) );
+
+ BSONObj shard1 = BSON( sf::maxSize(100LL) << lf::currSize(80LL) );
+ ASSERT( ! BalancerPolicy::isSizeMaxed( shard1 ) );
+
+ BSONObj shard2 = BSON( sf::maxSize(100LL) << lf::currSize(110LL) );
+ ASSERT( BalancerPolicy::isSizeMaxed( shard2 ) );
+
+ BSONObj empty;
+ ASSERT( ! BalancerPolicy::isSizeMaxed( empty ) );
+ }
+ };
+
+ class DrainingShardTest {
+ public:
+ void run() {
+ BSONObj shard0 = BSON( sf::draining(true) );
+ ASSERT( BalancerPolicy::isDraining( shard0 ) );
+
+ BSONObj shard1 = BSON( sf::draining(false) );
+ ASSERT( ! BalancerPolicy::isDraining( shard1 ) );
+
+ BSONObj empty;
+ ASSERT( ! BalancerPolicy::isDraining( empty ) );
+ }
+ };
+
+ class BalanceNormalTest {
+ public:
+ void run() {
+ // 2 chunks and 0 chunk shards
+ BalancerPolicy::ShardToChunksMap chunkMap;
+ vector<BSONObj> chunks;
+ chunks.push_back(BSON( "min" << BSON( "x" << BSON( "$minKey"<<1) ) <<
+ "max" << BSON( "x" << 49 )));
+ chunks.push_back(BSON( "min" << BSON( "x" << 49 ) <<
+ "max" << BSON( "x" << BSON( "$maxkey"<<1 ))));
+ chunkMap["shard0"] = chunks;
+ chunks.clear();
+ chunkMap["shard1"] = chunks;
+
+ // no limits
+ BalancerPolicy::ShardToLimitsMap limitsMap;
+ BSONObj limits0 = BSON( sf::maxSize(0LL) << lf::currSize(2LL) << sf::draining(false) << lf::hasOpsQueued(false) );
+ BSONObj limits1 = BSON( sf::maxSize(0LL) << lf::currSize(0LL) << sf::draining(false) << lf::hasOpsQueued(false) );
+ limitsMap["shard0"] = limits0;
+ limitsMap["shard1"] = limits1;
+
+ BalancerPolicy::ChunkInfo* c = NULL;
+ c = BalancerPolicy::balance( "ns", limitsMap, chunkMap, 1 );
+ ASSERT( c );
+ }
+ };
+
+ class BalanceDrainingTest {
+ public:
+ void run() {
+ // one normal, one draining
+ // 2 chunks and 0 chunk shards
+ BalancerPolicy::ShardToChunksMap chunkMap;
+ vector<BSONObj> chunks;
+ chunks.push_back(BSON( "min" << BSON( "x" << BSON( "$minKey"<<1) ) <<
+ "max" << BSON( "x" << 49 )));
+ chunkMap["shard0"] = chunks;
+ chunks.clear();
+ chunks.push_back(BSON( "min" << BSON( "x" << 49 ) <<
+ "max" << BSON( "x" << BSON( "$maxkey"<<1 ))));
+ chunkMap["shard1"] = chunks;
+
+ // shard0 is draining
+ BalancerPolicy::ShardToLimitsMap limitsMap;
+ BSONObj limits0 = BSON( sf::maxSize(0LL) << lf::currSize(2LL) << sf::draining(true) );
+ BSONObj limits1 = BSON( sf::maxSize(0LL) << lf::currSize(0LL) << sf::draining(false) );
+ limitsMap["shard0"] = limits0;
+ limitsMap["shard1"] = limits1;
+
+ BalancerPolicy::ChunkInfo* c = NULL;
+ c = BalancerPolicy::balance( "ns", limitsMap, chunkMap, 0 );
+ ASSERT( c );
+ ASSERT_EQUALS( c->to , "shard1" );
+ ASSERT_EQUALS( c->from , "shard0" );
+ ASSERT( ! c->chunk.isEmpty() );
+ }
+ };
+
+ class BalanceEndedDrainingTest {
+ public:
+ void run() {
+ // 2 chunks and 0 chunk (drain completed) shards
+ BalancerPolicy::ShardToChunksMap chunkMap;
+ vector<BSONObj> chunks;
+ chunks.push_back(BSON( "min" << BSON( "x" << BSON( "$minKey"<<1) ) <<
+ "max" << BSON( "x" << 49 )));
+ chunks.push_back(BSON( "min" << BSON( "x" << 49 ) <<
+ "max" << BSON( "x" << BSON( "$maxkey"<<1 ))));
+ chunkMap["shard0"] = chunks;
+ chunks.clear();
+ chunkMap["shard1"] = chunks;
+
+ // no limits
+ BalancerPolicy::ShardToLimitsMap limitsMap;
+ BSONObj limits0 = BSON( sf::maxSize(0LL) << lf::currSize(2LL) << sf::draining(false) );
+ BSONObj limits1 = BSON( sf::maxSize(0LL) << lf::currSize(0LL) << sf::draining(true) );
+ limitsMap["shard0"] = limits0;
+ limitsMap["shard1"] = limits1;
+
+ BalancerPolicy::ChunkInfo* c = NULL;
+ c = BalancerPolicy::balance( "ns", limitsMap, chunkMap, 0 );
+ ASSERT( ! c );
+ }
+ };
+
+ class BalanceImpasseTest {
+ public:
+ void run() {
+ // one maxed out, one draining
+ // 2 chunks and 0 chunk shards
+ BalancerPolicy::ShardToChunksMap chunkMap;
+ vector<BSONObj> chunks;
+ chunks.push_back(BSON( "min" << BSON( "x" << BSON( "$minKey"<<1) ) <<
+ "max" << BSON( "x" << 49 )));
+ chunkMap["shard0"] = chunks;
+ chunks.clear();
+ chunks.push_back(BSON( "min" << BSON( "x" << 49 ) <<
+ "max" << BSON( "x" << BSON( "$maxkey"<<1 ))));
+ chunkMap["shard1"] = chunks;
+
+ // shard0 is draining, shard1 is maxed out, shard2 has writebacks pending
+ BalancerPolicy::ShardToLimitsMap limitsMap;
+ BSONObj limits0 = BSON( sf::maxSize(0LL) << lf::currSize(2LL) << sf::draining(true) );
+ BSONObj limits1 = BSON( sf::maxSize(1LL) << lf::currSize(1LL) << sf::draining(false) );
+ BSONObj limits2 = BSON( sf::maxSize(0LL) << lf::currSize(1LL) << lf::hasOpsQueued(true) );
+ limitsMap["shard0"] = limits0;
+ limitsMap["shard1"] = limits1;
+ limitsMap["shard2"] = limits2;
+
+ BalancerPolicy::ChunkInfo* c = NULL;
+ c = BalancerPolicy::balance( "ns", limitsMap, chunkMap, 0 );
+ ASSERT( ! c );
+ }
+ };
+
+//
+// TODO SERVER-1822
+//
+#endif // #if 0
+
+ class All : public Suite {
+ public:
+ All() : Suite( "balancer_policy" ) {
+ }
+
+ void setupTests() {
+ // TODO SERVER-1822
+ // add< SizeMaxedShardTest >();
+ // add< DrainingShardTest >();
+ // add< BalanceNormalTest >();
+ // add< BalanceDrainingTest >();
+ // add< BalanceEndedDrainingTest >();
+ // add< BalanceImpasseTest >();
+ }
+ } allTests;
+
+} // namespace BalancerPolicyTests
diff --git a/dbtests/basictests.cpp b/dbtests/basictests.cpp
index f1e788a..3e0eecd 100644
--- a/dbtests/basictests.cpp
+++ b/dbtests/basictests.cpp
@@ -23,6 +23,8 @@
#include "../util/base64.h"
#include "../util/array.h"
#include "../util/text.h"
+#include "../util/queue.h"
+#include "../util/paths.h"
namespace BasicTests {
@@ -49,21 +51,21 @@ namespace BasicTests {
RARELY ++c;
}
};
-
+
class Base64Tests {
public:
-
- void roundTrip( string s ){
+
+ void roundTrip( string s ) {
ASSERT_EQUALS( s , base64::decode( base64::encode( s ) ) );
}
-
- void roundTrip( const unsigned char * _data , int len ){
+
+ void roundTrip( const unsigned char * _data , int len ) {
const char *data = (const char *) _data;
string s = base64::encode( data , len );
string out = base64::decode( s );
ASSERT_EQUALS( out.size() , static_cast<size_t>(len) );
bool broke = false;
- for ( int i=0; i<len; i++ ){
+ for ( int i=0; i<len; i++ ) {
if ( data[i] != out[i] )
broke = true;
}
@@ -77,16 +79,16 @@ namespace BasicTests {
for ( int i=0; i<len; i++ )
cout << hex << ( out[i] & 0xFF ) << dec << " ";
cout << endl;
-
+
ASSERT(0);
}
-
- void run(){
+
+ void run() {
ASSERT_EQUALS( "ZWxp" , base64::encode( "eli" , 3 ) );
ASSERT_EQUALS( "ZWxpb3Rz" , base64::encode( "eliots" , 6 ) );
ASSERT_EQUALS( "ZWxpb3Rz" , base64::encode( "eliots" ) );
-
+
ASSERT_EQUALS( "ZQ==" , base64::encode( "e" , 1 ) );
ASSERT_EQUALS( "ZWw=" , base64::encode( "el" , 2 ) );
@@ -97,10 +99,10 @@ namespace BasicTests {
roundTrip( "eliot" );
roundTrip( "eliots" );
roundTrip( "eliotsz" );
-
+
unsigned char z[] = { 0x1 , 0x2 , 0x3 , 0x4 };
roundTrip( z , 4 );
-
+
unsigned char y[] = {
0x01, 0x10, 0x83, 0x10, 0x51, 0x87, 0x20, 0x92, 0x8B, 0x30,
0xD3, 0x8F, 0x41, 0x14, 0x93, 0x51, 0x55, 0x97, 0x61, 0x96,
@@ -115,15 +117,15 @@ namespace BasicTests {
namespace stringbuildertests {
#define SBTGB(x) ss << (x); sb << (x);
-
+
class Base {
virtual void pop() = 0;
-
+
public:
- Base(){}
- virtual ~Base(){}
+ Base() {}
+ virtual ~Base() {}
- void run(){
+ void run() {
pop();
ASSERT_EQUALS( ss.str() , sb.str() );
}
@@ -131,9 +133,9 @@ namespace BasicTests {
stringstream ss;
StringBuilder sb;
};
-
+
class simple1 : public Base {
- void pop(){
+ void pop() {
SBTGB(1);
SBTGB("yo");
SBTGB(2);
@@ -141,7 +143,7 @@ namespace BasicTests {
};
class simple2 : public Base {
- void pop(){
+ void pop() {
SBTGB(1);
SBTGB("yo");
SBTGB(2);
@@ -154,10 +156,10 @@ namespace BasicTests {
SBTGB( (short)(1231231231231LL) );
}
};
-
+
class reset1 {
public:
- void run(){
+ void run() {
StringBuilder sb;
sb << "1" << "abc" << "5.17";
ASSERT_EQUALS( "1abc5.17" , sb.str() );
@@ -171,7 +173,7 @@ namespace BasicTests {
class reset2 {
public:
- void run(){
+ void run() {
StringBuilder sb;
sb << "1" << "abc" << "5.17";
ASSERT_EQUALS( "1abc5.17" , sb.str() );
@@ -188,12 +190,19 @@ namespace BasicTests {
class sleeptest {
public:
- void run(){
+ void run() {
Timer t;
- sleepsecs( 1 );
- ASSERT_EQUALS( 1 , t.seconds() );
+ int matches = 0;
+ for( int p = 0; p < 3; p++ ) {
+ sleepsecs( 1 );
+ int sec = t.seconds();
+ if( sec == 1 )
+ matches++;
+ ASSERT( sec >= 0 && sec <= 2 );
+ t.reset();
+ }
+ ASSERT( matches >= 2 );
- t.reset();
sleepmicros( 1527123 );
ASSERT( t.micros() > 1000000 );
ASSERT( t.micros() < 2000000 );
@@ -202,17 +211,17 @@ namespace BasicTests {
sleepmillis( 1727 );
ASSERT( t.millis() >= 1000 );
ASSERT( t.millis() <= 2500 );
-
+
{
int total = 1200;
int ms = 2;
t.reset();
- for ( int i=0; i<(total/ms); i++ ){
+ for ( int i=0; i<(total/ms); i++ ) {
sleepmillis( ms );
}
{
int x = t.millis();
- if ( x < 1000 || x > 2500 ){
+ if ( x < 1000 || x > 2500 ) {
cout << "sleeptest x: " << x << endl;
ASSERT( x >= 1000 );
ASSERT( x <= 20000 );
@@ -226,12 +235,12 @@ namespace BasicTests {
int micros = 100;
t.reset();
int numSleeps = 1000*(total/micros);
- for ( int i=0; i<numSleeps; i++ ){
+ for ( int i=0; i<numSleeps; i++ ) {
sleepmicros( micros );
}
{
int y = t.millis();
- if ( y < 1000 || y > 2500 ){
+ if ( y < 1000 || y > 2500 ) {
cout << "sleeptest y: " << y << endl;
ASSERT( y >= 1000 );
/* ASSERT( y <= 100000 ); */
@@ -239,9 +248,9 @@ namespace BasicTests {
}
}
#endif
-
+
}
-
+
};
class AssertTests {
@@ -249,15 +258,15 @@ namespace BasicTests {
int x;
- AssertTests(){
+ AssertTests() {
x = 0;
}
- string foo(){
+ string foo() {
x++;
return "";
}
- void run(){
+ void run() {
uassert( -1 , foo() , 1 );
if( x != 0 ) {
ASSERT_EQUALS( 0 , x );
@@ -265,7 +274,7 @@ namespace BasicTests {
try {
uassert( -1 , foo() , 0 );
}
- catch ( ... ){}
+ catch ( ... ) {}
ASSERT_EQUALS( 1 , x );
}
};
@@ -273,13 +282,13 @@ namespace BasicTests {
namespace ArrayTests {
class basic1 {
public:
- void run(){
+ void run() {
FastArray<int> a(100);
a.push_back( 5 );
a.push_back( 6 );
-
+
ASSERT_EQUALS( 2 , a.size() );
-
+
FastArray<int>::iterator i = a.begin();
ASSERT( i != a.end() );
ASSERT_EQUALS( 5 , *i );
@@ -291,10 +300,10 @@ namespace BasicTests {
}
};
};
-
+
class ThreadSafeStringTest {
public:
- void run(){
+ void run() {
ThreadSafeString s;
s = "eliot";
ASSERT_EQUALS( s , "eliot" );
@@ -302,8 +311,8 @@ namespace BasicTests {
ThreadSafeString s2 = s;
ASSERT_EQUALS( s2 , "eliot" );
-
-
+
+
{
string foo;
{
@@ -315,11 +324,11 @@ namespace BasicTests {
}
}
};
-
+
class LexNumCmp {
public:
void run() {
-
+
ASSERT( ! isNumber( (char)255 ) );
ASSERT_EQUALS( 0, lexNumCmp( "a", "a" ) );
@@ -355,7 +364,7 @@ namespace BasicTests {
ASSERT_EQUALS( -1, lexNumCmp( "a1{", "a1{a" ) );
ASSERT_EQUALS( 1, lexNumCmp("21", "11") );
ASSERT_EQUALS( -1, lexNumCmp("11", "21") );
-
+
ASSERT_EQUALS( -1 , lexNumCmp( "a.0" , "a.1" ) );
ASSERT_EQUALS( -1 , lexNumCmp( "a.0.b" , "a.1" ) );
@@ -363,52 +372,78 @@ namespace BasicTests {
ASSERT_EQUALS( -1 , lexNumCmp( "b.0e" , (string("b.") + (char)255).c_str() ) );
ASSERT_EQUALS( -1 , lexNumCmp( "b." , "b.0e" ) );
- ASSERT_EQUALS( 0, lexNumCmp( "238947219478347782934718234", "238947219478347782934718234"));
- ASSERT_EQUALS( 0, lexNumCmp( "000238947219478347782934718234", "238947219478347782934718234"));
- ASSERT_EQUALS( 1, lexNumCmp( "000238947219478347782934718235", "238947219478347782934718234"));
- ASSERT_EQUALS( -1, lexNumCmp( "238947219478347782934718234", "238947219478347782934718234.1"));
- ASSERT_EQUALS( 0, lexNumCmp( "238", "000238"));
- ASSERT_EQUALS( 0, lexNumCmp( "002384", "0002384"));
- ASSERT_EQUALS( 0, lexNumCmp( "00002384", "0002384"));
- ASSERT_EQUALS( 0, lexNumCmp( "0", "0"));
- ASSERT_EQUALS( 0, lexNumCmp( "0000", "0"));
+ ASSERT_EQUALS( 0, lexNumCmp( "238947219478347782934718234", "238947219478347782934718234"));
+ ASSERT_EQUALS( 0, lexNumCmp( "000238947219478347782934718234", "238947219478347782934718234"));
+ ASSERT_EQUALS( 1, lexNumCmp( "000238947219478347782934718235", "238947219478347782934718234"));
+ ASSERT_EQUALS( -1, lexNumCmp( "238947219478347782934718234", "238947219478347782934718234.1"));
+ ASSERT_EQUALS( 0, lexNumCmp( "238", "000238"));
+ ASSERT_EQUALS( 0, lexNumCmp( "002384", "0002384"));
+ ASSERT_EQUALS( 0, lexNumCmp( "00002384", "0002384"));
+ ASSERT_EQUALS( 0, lexNumCmp( "0", "0"));
+ ASSERT_EQUALS( 0, lexNumCmp( "0000", "0"));
ASSERT_EQUALS( 0, lexNumCmp( "0", "000"));
ASSERT_EQUALS( -1, lexNumCmp( "0000", "0.0"));
- ASSERT_EQUALS( 1, lexNumCmp( "2380", "238"));
- ASSERT_EQUALS( 1, lexNumCmp( "2385", "2384"));
- ASSERT_EQUALS( 1, lexNumCmp( "2385", "02384"));
- ASSERT_EQUALS( 1, lexNumCmp( "2385", "002384"));
- ASSERT_EQUALS( -1, lexNumCmp( "123.234.4567", "00238"));
- ASSERT_EQUALS( 0, lexNumCmp( "123.234", "00123.234"));
- ASSERT_EQUALS( 0, lexNumCmp( "a.123.b", "a.00123.b"));
- ASSERT_EQUALS( 1, lexNumCmp( "a.123.b", "a.b.00123.b"));
- ASSERT_EQUALS( -1, lexNumCmp( "a.00.0", "a.0.1"));
- ASSERT_EQUALS( 0, lexNumCmp( "01.003.02", "1.3.2"));
- ASSERT_EQUALS( -1, lexNumCmp( "1.3.2", "10.300.20"));
- ASSERT_EQUALS( 0, lexNumCmp( "10.300.20", "000000000000010.0000300.000000020"));
- ASSERT_EQUALS( 0, lexNumCmp( "0000a", "0a"));
- ASSERT_EQUALS( -1, lexNumCmp( "a", "0a"));
- ASSERT_EQUALS( -1, lexNumCmp( "000a", "001a"));
- ASSERT_EQUALS( 0, lexNumCmp( "010a", "0010a"));
+ ASSERT_EQUALS( 1, lexNumCmp( "2380", "238"));
+ ASSERT_EQUALS( 1, lexNumCmp( "2385", "2384"));
+ ASSERT_EQUALS( 1, lexNumCmp( "2385", "02384"));
+ ASSERT_EQUALS( 1, lexNumCmp( "2385", "002384"));
+ ASSERT_EQUALS( -1, lexNumCmp( "123.234.4567", "00238"));
+ ASSERT_EQUALS( 0, lexNumCmp( "123.234", "00123.234"));
+ ASSERT_EQUALS( 0, lexNumCmp( "a.123.b", "a.00123.b"));
+ ASSERT_EQUALS( 1, lexNumCmp( "a.123.b", "a.b.00123.b"));
+ ASSERT_EQUALS( -1, lexNumCmp( "a.00.0", "a.0.1"));
+ ASSERT_EQUALS( 0, lexNumCmp( "01.003.02", "1.3.2"));
+ ASSERT_EQUALS( -1, lexNumCmp( "1.3.2", "10.300.20"));
+ ASSERT_EQUALS( 0, lexNumCmp( "10.300.20", "000000000000010.0000300.000000020"));
+ ASSERT_EQUALS( 0, lexNumCmp( "0000a", "0a"));
+ ASSERT_EQUALS( -1, lexNumCmp( "a", "0a"));
+ ASSERT_EQUALS( -1, lexNumCmp( "000a", "001a"));
+ ASSERT_EQUALS( 0, lexNumCmp( "010a", "0010a"));
}
};
class DatabaseValidNames {
public:
- void run(){
+ void run() {
ASSERT( Database::validDBName( "foo" ) );
ASSERT( ! Database::validDBName( "foo/bar" ) );
ASSERT( ! Database::validDBName( "foo.bar" ) );
- ASSERT( nsDollarCheck( "asdads" ) );
- ASSERT( ! nsDollarCheck( "asda$ds" ) );
- ASSERT( nsDollarCheck( "local.oplog.$main" ) );
+ ASSERT( isANormalNSName( "asdads" ) );
+ ASSERT( ! isANormalNSName( "asda$ds" ) );
+ ASSERT( isANormalNSName( "local.oplog.$main" ) );
+ }
+ };
+
+ class DatabaseOwnsNS {
+ public:
+ void run() {
+
+ bool isNew = false;
+ // this leaks as ~Database is private
+ // if that changes, should put this on the stack
+ Database * db = new Database( "dbtests_basictests_ownsns" , isNew );
+ assert( isNew );
+
+ ASSERT( db->ownsNS( "dbtests_basictests_ownsns.x" ) );
+ ASSERT( db->ownsNS( "dbtests_basictests_ownsns.x.y" ) );
+ ASSERT( ! db->ownsNS( "dbtests_basictests_ownsn.x.y" ) );
+ ASSERT( ! db->ownsNS( "dbtests_basictests_ownsnsa.x.y" ) );
+ }
+ };
+
+ class NSValidNames {
+ public:
+ void run() {
+ ASSERT( isValidNS( "test.foo" ) );
+ ASSERT( ! isValidNS( "test." ) );
+ ASSERT( ! isValidNS( "test" ) );
}
};
-
+
class PtrTests {
public:
- void run(){
+ void run() {
scoped_ptr<int> p1 (new int(1));
boost::shared_ptr<int> p2 (new int(2));
scoped_ptr<const int> p3 (new int(3));
@@ -419,7 +454,7 @@ namespace BasicTests {
ASSERT_EQUALS( p2.get() , ptr<int>(p2) );
ASSERT_EQUALS( p2.get() , ptr<int>(p2.get()) ); // T* constructor
ASSERT_EQUALS( p2.get() , ptr<int>(ptr<int>(p2)) ); // copy constructor
- ASSERT_EQUALS( *p2 , *ptr<int>(p2));
+ ASSERT_EQUALS( *p2 , *ptr<int>(p2));
ASSERT_EQUALS( p2.get() , ptr<boost::shared_ptr<int> >(&p2)->get() ); // operator->
//const
@@ -431,14 +466,14 @@ namespace BasicTests {
ASSERT_EQUALS( p4.get() , ptr<const int>(p4.get()) );
ASSERT_EQUALS( p2.get() , ptr<const int>(ptr<const int>(p2)) );
ASSERT_EQUALS( p2.get() , ptr<const int>(ptr<int>(p2)) ); // constizing copy constructor
- ASSERT_EQUALS( *p2 , *ptr<int>(p2));
+ ASSERT_EQUALS( *p2 , *ptr<int>(p2));
ASSERT_EQUALS( p2.get() , ptr<const boost::shared_ptr<int> >(&p2)->get() );
//bool context
ASSERT( ptr<int>(p1) );
ASSERT( !ptr<int>(NULL) );
ASSERT( !ptr<int>() );
-
+
#if 0
// These shouldn't compile
ASSERT_EQUALS( p3.get() , ptr<int>(p3) );
@@ -450,12 +485,12 @@ namespace BasicTests {
struct StringSplitterTest {
- void test( string s ){
+ void test( string s ) {
vector<string> v = StringSplitter::split( s , "," );
ASSERT_EQUALS( s , StringSplitter::join( v , "," ) );
}
- void run(){
+ void run() {
test( "a" );
test( "a,b" );
test( "a,b,c" );
@@ -496,16 +531,68 @@ namespace BasicTests {
};
+ class QueueTest {
+ public:
+ void run() {
+ BlockingQueue<int> q;
+ Timer t;
+ int x;
+ ASSERT( ! q.blockingPop( x , 5 ) );
+ ASSERT( t.seconds() > 3 && t.seconds() < 9 );
+
+ }
+ };
+
+ class StrTests {
+ public:
+
+ void run() {
+ ASSERT_EQUALS( 1u , str::count( "abc" , 'b' ) );
+ ASSERT_EQUALS( 3u , str::count( "babab" , 'b' ) );
+ }
+
+ };
+
+ class HostAndPortTests {
+ public:
+ void run() {
+ HostAndPort a( "x1" , 1000 );
+ HostAndPort b( "x1" , 1000 );
+ HostAndPort c( "x1" , 1001 );
+ HostAndPort d( "x2" , 1000 );
+
+ ASSERT( a == b );
+ ASSERT( a != c );
+ ASSERT( a != d );
+
+ }
+ };
+
+ class RelativePathTest {
+ public:
+ void run() {
+ RelativePath a = RelativePath::fromRelativePath( "a" );
+ RelativePath b = RelativePath::fromRelativePath( "a" );
+ RelativePath c = RelativePath::fromRelativePath( "b" );
+ RelativePath d = RelativePath::fromRelativePath( "a/b" );
+
+
+ ASSERT( a == b );
+ ASSERT( a != c );
+ ASSERT( a != d );
+ ASSERT( c != d );
+ }
+ };
class All : public Suite {
public:
- All() : Suite( "basic" ){
+ All() : Suite( "basic" ) {
}
-
- void setupTests(){
+
+ void setupTests() {
add< Rarely >();
add< Base64Tests >();
-
+
add< stringbuildertests::simple1 >();
add< stringbuildertests::simple2 >();
add< stringbuildertests::reset1 >();
@@ -513,18 +600,28 @@ namespace BasicTests {
add< sleeptest >();
add< AssertTests >();
-
+
add< ArrayTests::basic1 >();
add< LexNumCmp >();
add< DatabaseValidNames >();
+ add< DatabaseOwnsNS >();
+
+ add< NSValidNames >();
add< PtrTests >();
add< StringSplitterTest >();
add< IsValidUTF8Test >();
+
+ add< QueueTest >();
+
+ add< StrTests >();
+
+ add< HostAndPortTests >();
+ add< RelativePathTest >();
}
} myall;
-
+
} // namespace BasicTests
diff --git a/dbtests/btreetests.cpp b/dbtests/btreetests.cpp
index a90a097..4da7375 100644
--- a/dbtests/btreetests.cpp
+++ b/dbtests/btreetests.cpp
@@ -29,7 +29,12 @@ namespace BtreeTests {
const char* ns() {
return "unittests.btreetests";
}
-
+
+ // dummy, valid record loc
+ const DiskLoc recordLoc() {
+ return DiskLoc( 0, 2 );
+ }
+
class Ensure {
public:
Ensure() {
@@ -41,45 +46,55 @@ namespace BtreeTests {
private:
DBDirectClient _c;
};
-
+
class Base : public Ensure {
public:
- Base() :
- _context( ns() ) {
+ Base() :
+ _context( ns() ) {
{
bool f = false;
assert( f = true );
massert( 10402 , "assert is misdefined", f);
}
}
+ virtual ~Base() {}
+ static string bigNumString( long long n, int len = 800 ) {
+ char sub[17];
+ sprintf( sub, "%.16llx", n );
+ string val( len, ' ' );
+ for( int i = 0; i < len; ++i ) {
+ val[ i ] = sub[ i % 16 ];
+ }
+ return val;
+ }
protected:
- BtreeBucket* bt() {
+ const BtreeBucket* bt() {
return id().head.btree();
}
DiskLoc dl() {
return id().head;
}
IndexDetails& id() {
- return nsdetails( ns() )->idx( 1 );
- }
- // dummy, valid record loc
- static DiskLoc recordLoc() {
- return DiskLoc( 0, 2 );
+ NamespaceDetails *nsd = nsdetails( ns() );
+ assert( nsd );
+ return nsd->idx( 1 );
}
void checkValid( int nKeys ) {
ASSERT( bt() );
ASSERT( bt()->isHead() );
bt()->assertValid( order(), true );
- ASSERT_EQUALS( nKeys, bt()->fullValidate( dl(), order() ) );
+ ASSERT_EQUALS( nKeys, bt()->fullValidate( dl(), order(), 0, true ) );
}
void dump() {
bt()->dumpTree( dl(), order() );
}
void insert( BSONObj &key ) {
bt()->bt_insert( dl(), recordLoc(), key, Ordering::make(order()), true, id(), true );
+ getDur().commitIfNeeded();
}
- void unindex( BSONObj &key ) {
- bt()->unindex( dl(), id(), key, recordLoc() );
+ bool unindex( BSONObj &key ) {
+ getDur().commitIfNeeded();
+ return bt()->unindex( dl(), id(), key, recordLoc() );
}
static BSONObj simpleKey( char c, int n = 1 ) {
BSONObjBuilder builder;
@@ -98,9 +113,38 @@ namespace BtreeTests {
ASSERT( location == expectedLocation );
ASSERT_EQUALS( expectedPos, pos );
}
+ bool present( BSONObj &key, int direction ) {
+ int pos;
+ bool found;
+ bt()->locate( id(), dl(), key, Ordering::make(order()), pos, found, recordLoc(), direction );
+ return found;
+ }
BSONObj order() {
return id().keyPattern();
}
+ const BtreeBucket *child( const BtreeBucket *b, int i ) {
+ assert( i <= b->nKeys() );
+ DiskLoc d;
+ if ( i == b->nKeys() ) {
+ d = b->getNextChild();
+ }
+ else {
+ d = const_cast< DiskLoc& >( b->keyNode( i ).prevChildBucket );
+ }
+ assert( !d.isNull() );
+ return d.btree();
+ }
+ void checkKey( char i ) {
+ stringstream ss;
+ ss << i;
+ checkKey( ss.str() );
+ }
+ void checkKey( const string &k ) {
+ BSONObj key = BSON( "" << k );
+// log() << "key: " << key << endl;
+ ASSERT( present( key, 1 ) );
+ ASSERT( present( key, -1 ) );
+ }
private:
dblock lk_;
Client::Context _context;
@@ -140,6 +184,8 @@ namespace BtreeTests {
insert( longKey );
}
checkValid( 20 );
+ ASSERT_EQUALS( 1, bt()->nKeys() );
+ checkSplit();
}
protected:
virtual char shortToken( int i ) const = 0;
@@ -150,6 +196,7 @@ namespace BtreeTests {
static char rightToken( int i ) {
return 'z' - i;
}
+ virtual void checkSplit() = 0;
};
class SplitRightHeavyBucket : public SplitUnevenBucketBase {
@@ -160,6 +207,10 @@ namespace BtreeTests {
virtual char longToken( int i ) const {
return rightToken( i );
}
+ virtual void checkSplit() {
+ ASSERT_EQUALS( 15, child( bt(), 0 )->nKeys() );
+ ASSERT_EQUALS( 4, child( bt(), 1 )->nKeys() );
+ }
};
class SplitLeftHeavyBucket : public SplitUnevenBucketBase {
@@ -170,6 +221,10 @@ namespace BtreeTests {
virtual char longToken( int i ) const {
return leftToken( i );
}
+ virtual void checkSplit() {
+ ASSERT_EQUALS( 4, child( bt(), 0 )->nKeys() );
+ ASSERT_EQUALS( 15, child( bt(), 1 )->nKeys() );
+ }
};
class MissingLocate : public Base {
@@ -225,7 +280,7 @@ namespace BtreeTests {
}
void insert( int i ) {
BSONObj k = key( 'b' + 2 * i );
- Base::insert( k );
+ Base::insert( k );
}
};
@@ -247,20 +302,21 @@ namespace BtreeTests {
}
void insert( int i ) {
BSONObj k = key( 'b' + 2 * i );
- Base::insert( k );
- }
+ Base::insert( k );
+ }
};
- class ReuseUnused : public Base {
+ class DontReuseUnused : public Base {
public:
void run() {
for ( int i = 0; i < 10; ++i ) {
insert( i );
}
+// dump();
BSONObj root = key( 'p' );
unindex( root );
Base::insert( root );
- locate( root, 0, true, dl(), 1 );
+ locate( root, 0, true, bt()->getNextChild(), 1 );
}
private:
BSONObj key( char c ) {
@@ -268,16 +324,17 @@ namespace BtreeTests {
}
void insert( int i ) {
BSONObj k = key( 'b' + 2 * i );
- Base::insert( k );
- }
+ Base::insert( k );
+ }
};
-
+
class PackUnused : public Base {
public:
void run() {
for ( long long i = 0; i < 1000000; i += 1000 ) {
insert( i );
}
+// dump();
string orig, after;
{
stringstream ss;
@@ -294,8 +351,9 @@ namespace BtreeTests {
while( c->ok() ) {
if ( !c->currKeyNode().prevChildBucket.isNull() ) {
toDel.push_back( c->currKey().firstElement().valuestr() );
- } else {
- other.push_back( c->currKey().firstElement().valuestr() );
+ }
+ else {
+ other.push_back( c->currKey().firstElement().valuestr() );
}
c->advance();
}
@@ -311,30 +369,25 @@ namespace BtreeTests {
}
int unused = 0;
- ASSERT_EQUALS( 0, bt()->fullValidate( dl(), order(), &unused ) );
+ ASSERT_EQUALS( 0, bt()->fullValidate( dl(), order(), &unused, true ) );
for ( long long i = 50000; i < 50100; ++i ) {
insert( i );
- }
+ }
int unused2 = 0;
- ASSERT_EQUALS( 100, bt()->fullValidate( dl(), order(), &unused2 ) );
+ ASSERT_EQUALS( 100, bt()->fullValidate( dl(), order(), &unused2, true ) );
- ASSERT( unused2 < unused );
+// log() << "old unused: " << unused << ", new unused: " << unused2 << endl;
+//
+ ASSERT( unused2 <= unused );
}
protected:
void insert( long long n ) {
- string val( 800, ' ' );
- for( int i = 0; i < 800; i += 8 ) {
- for( int j = 0; j < 8; ++j ) {
- // probably we won't get > 56 bits
- unsigned char v = 0x80 | ( n >> ( ( 8 - j - 1 ) * 7 ) & 0x000000000000007f );
- val[ i + j ] = v;
- }
- }
+ string val = bigNumString( n );
BSONObj k = BSON( "a" << val );
- Base::insert( k );
- }
+ Base::insert( k );
+ }
};
class DontDropReferenceKey : public PackUnused {
@@ -344,7 +397,7 @@ namespace BtreeTests {
for ( long long i = 0; i < 80; i += 1 ) {
insert( i );
}
-
+
BSONObjBuilder start;
start.appendMinKey( "a" );
BSONObjBuilder end;
@@ -360,19 +413,1220 @@ namespace BtreeTests {
c->advance();
}
// too much work to try to make this happen through inserts and deletes
- const_cast< DiskLoc& >( bt()->keyNode( 1 ).prevChildBucket ) = DiskLoc();
- const_cast< DiskLoc& >( bt()->keyNode( 1 ).recordLoc ).GETOFS() |= 1; // make unused
+ // we are intentionally manipulating the btree bucket directly here
+ getDur().writingDiskLoc( const_cast< DiskLoc& >( bt()->keyNode( 1 ).prevChildBucket ) ) = DiskLoc();
+ getDur().writingInt( const_cast< DiskLoc& >( bt()->keyNode( 1 ).recordLoc ).GETOFS() ) |= 1; // make unused
BSONObj k = BSON( "a" << toInsert );
Base::insert( k );
}
};
-
+
+ class MergeBuckets : public Base {
+ public:
+ virtual ~MergeBuckets() {}
+ void run() {
+ for ( int i = 0; i < 10; ++i ) {
+ insert( i );
+ }
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ int expectedCount = 10 - unindexKeys();
+// dump();
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ int unused = 0;
+ ASSERT_EQUALS( expectedCount, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ }
+ protected:
+ BSONObj key( char c ) {
+ return simpleKey( c, 800 );
+ }
+ void insert( int i ) {
+ BSONObj k = key( 'b' + 2 * i );
+ Base::insert( k );
+ }
+ virtual int unindexKeys() = 0;
+ };
+
+ class MergeBucketsLeft : public MergeBuckets {
+ virtual int unindexKeys() {
+ BSONObj k = key( 'b' );
+ unindex( k );
+ k = key( 'b' + 2 );
+ unindex( k );
+ k = key( 'b' + 4 );
+ unindex( k );
+ k = key( 'b' + 6 );
+ unindex( k );
+ return 4;
+ }
+ };
+
+ class MergeBucketsRight : public MergeBuckets {
+ virtual int unindexKeys() {
+ BSONObj k = key( 'b' + 2 * 9 );
+ unindex( k );
+ return 1;
+ }
+ };
+
+ // deleting from head won't coalesce yet
+// class MergeBucketsHead : public MergeBuckets {
+// virtual BSONObj unindexKey() { return key( 'p' ); }
+// };
+
+ class MergeBucketsDontReplaceHead : public Base {
+ public:
+ void run() {
+ for ( int i = 0; i < 18; ++i ) {
+ insert( i );
+ }
+ // dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = key( 'a' + 17 );
+ unindex( k );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ int unused = 0;
+ ASSERT_EQUALS( 17, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ }
+ private:
+ BSONObj key( char c ) {
+ return simpleKey( c, 800 );
+ }
+ void insert( int i ) {
+ BSONObj k = key( 'a' + i );
+ Base::insert( k );
+ }
+ };
+
+ // Tool to construct custom trees for tests.
+ class ArtificialTree : public BtreeBucket {
+ public:
+ void push( const BSONObj &key, const DiskLoc &child ) {
+ pushBack( dummyDiskLoc(), key, Ordering::make( BSON( "a" << 1 ) ), child );
+ }
+ void setNext( const DiskLoc &child ) {
+ nextChild = child;
+ }
+ static DiskLoc make( IndexDetails &id ) {
+ DiskLoc ret = addBucket( id );
+ is( ret )->init();
+ getDur().commitIfNeeded();
+ return ret;
+ }
+ static ArtificialTree *is( const DiskLoc &l ) {
+ return static_cast< ArtificialTree * >( l.btreemod() );
+ }
+ static DiskLoc makeTree( const string &spec, IndexDetails &id ) {
+ return makeTree( fromjson( spec ), id );
+ }
+ static DiskLoc makeTree( const BSONObj &spec, IndexDetails &id ) {
+ DiskLoc node = make( id );
+ ArtificialTree *n = ArtificialTree::is( node );
+ BSONObjIterator i( spec );
+ while( i.more() ) {
+ BSONElement e = i.next();
+ DiskLoc child;
+ if ( e.type() == Object ) {
+ child = makeTree( e.embeddedObject(), id );
+ }
+ if ( e.fieldName() == string( "_" ) ) {
+ n->setNext( child );
+ }
+ else {
+ n->push( BSON( "" << expectedKey( e.fieldName() ) ), child );
+ }
+ }
+ n->fixParentPtrs( node );
+ return node;
+ }
+ static void setTree( const string &spec, IndexDetails &id ) {
+ set( makeTree( spec, id ), id );
+ }
+ static void set( const DiskLoc &l, IndexDetails &id ) {
+ ArtificialTree::is( id.head )->deallocBucket( id.head, id );
+ getDur().writingDiskLoc(id.head) = l;
+ }
+ static string expectedKey( const char *spec ) {
+ if ( spec[ 0 ] != '$' ) {
+ return spec;
+ }
+ char *endPtr;
+ // parsing a long long is a pain, so just allow shorter keys for now
+ unsigned long long num = strtol( spec + 1, &endPtr, 16 );
+ int len = 800;
+ if( *endPtr == '$' ) {
+ len = strtol( endPtr + 1, 0, 16 );
+ }
+ return Base::bigNumString( num, len );
+ }
+ static void checkStructure( const BSONObj &spec, const IndexDetails &id, const DiskLoc node ) {
+ ArtificialTree *n = ArtificialTree::is( node );
+ BSONObjIterator j( spec );
+ for( int i = 0; i < n->n; ++i ) {
+ ASSERT( j.more() );
+ BSONElement e = j.next();
+ KeyNode kn = n->keyNode( i );
+ string expected = expectedKey( e.fieldName() );
+ ASSERT( present( id, BSON( "" << expected ), 1 ) );
+ ASSERT( present( id, BSON( "" << expected ), -1 ) );
+ ASSERT_EQUALS( expected, kn.key.firstElement().valuestr() );
+ if ( kn.prevChildBucket.isNull() ) {
+ ASSERT( e.type() == jstNULL );
+ }
+ else {
+ ASSERT( e.type() == Object );
+ checkStructure( e.embeddedObject(), id, kn.prevChildBucket );
+ }
+ }
+ if ( n->nextChild.isNull() ) {
+ // maybe should allow '_' field with null value?
+ ASSERT( !j.more() );
+ }
+ else {
+ BSONElement e = j.next();
+ ASSERT_EQUALS( string( "_" ), e.fieldName() );
+ ASSERT( e.type() == Object );
+ checkStructure( e.embeddedObject(), id, n->nextChild );
+ }
+ ASSERT( !j.more() );
+ }
+ static void checkStructure( const string &spec, const IndexDetails &id ) {
+ checkStructure( fromjson( spec ), id, id.head );
+ }
+ static bool present( const IndexDetails &id, const BSONObj &key, int direction ) {
+ int pos;
+ bool found;
+ id.head.btree()->locate( id, id.head, key, Ordering::make(id.keyPattern()), pos, found, recordLoc(), direction );
+ return found;
+ }
+ int headerSize() const { return BtreeBucket::headerSize(); }
+ int packedDataSize( int pos ) const { return BtreeBucket::packedDataSize( pos ); }
+ void fixParentPtrs( const DiskLoc &thisLoc ) { BtreeBucket::fixParentPtrs( thisLoc ); }
+ void forcePack() {
+ topSize += emptySize;
+ emptySize = 0;
+ setNotPacked();
+ }
+ private:
+ DiskLoc dummyDiskLoc() const { return DiskLoc( 0, 2 ); }
+ };
+
+ /**
+ * We could probably refactor the following tests, but it's easier to debug
+ * them in the present state.
+ */
+
+ class MergeBucketsDelInternal : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},bb:null,_:{c:null}},_:{f:{e:null},_:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "bb" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{b:{a:null},d:{c:null},f:{e:null},_:{g:null}}", id() );
+ }
+ };
+
+ class MergeBucketsRightNull : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},_:{f:{e:null},h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "bb" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}", id() );
+ }
+ };
+
+ // not yet handling this case
+ class DontMergeSingleBucket : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},c:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "c" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{d:{b:{a:null}}}", id() );
+ }
+ };
+
+ class ParentMergeNonRightToLeft : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},i:{f:{e:null},h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "bb" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ // child does not currently replace parent in this case
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{i:{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}}", id() );
+ }
+ };
+
+ class ParentMergeNonRightToRight : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},cc:{c:null}},i:{f:{e:null},ff:null,h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "ff" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ // child does not currently replace parent in this case
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{i:{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}}", id() );
+ }
+ };
+
+ class CantMergeRightNoMerge : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},dd:null,_:{f:{e:null},h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "bb" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{d:{b:{a:null},cc:{c:null}},dd:null,_:{f:{e:null},h:{g:null}}}", id() );
+ }
+ };
+
+ class CantMergeLeftNoMerge : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{c:{b:{a:null}},d:null,_:{f:{e:null},g:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "g" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 6, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{c:{b:{a:null}},d:null,_:{f:{e:null}}}", id() );
+ }
+ };
+
+ class MergeOption : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{c:{b:{a:null}},f:{e:{d:null},ee:null},_:{h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "ee" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{c:{b:{a:null}},_:{e:{d:null},f:null,h:{g:null}}}", id() );
+ }
+ };
+
+ class ForceMergeLeft : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{c:{b:{a:null}},f:{e:{d:null},ee:null},ff:null,_:{h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "ee" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{f:{b:{a:null},c:null,e:{d:null}},ff:null,_:{h:{g:null}}}", id() );
+ }
+ };
+
+ class ForceMergeRight : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{c:{b:{a:null}},cc:null,f:{e:{d:null},ee:null},_:{h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "ee" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{c:{b:{a:null}},cc:null,_:{e:{d:null},f:null,h:{g:null}}}", id() );
+ }
+ };
+
+ class RecursiveMerge : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},g:{f:null}},j:{i:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "c" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ // height is not currently reduced in this case
+ ArtificialTree::checkStructure( "{j:{g:{b:{a:null},d:null,e:null,f:null},h:null,i:null}}", id() );
+ }
+ };
+
+ class RecursiveMergeRightBucket : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},g:{f:null}},_:{i:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "c" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{g:{b:{a:null},d:null,e:null,f:null},h:null,i:null}", id() );
+ }
+ };
+
+ class RecursiveMergeDoubleRightBucket : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},_:{f:null}},_:{i:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "c" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ // no recursion currently in this case
+ ArtificialTree::checkStructure( "{h:{b:{a:null},d:null,e:null,f:null},_:{i:null}}", id() );
+ }
+ };
+
+ class MergeSizeBase : public Base {
+ public:
+ MergeSizeBase() : _count() {}
+ virtual ~MergeSizeBase() {}
+ void run() {
+ typedef ArtificialTree A;
+ A::set( A::make( id() ), id() );
+ A* root = A::is( dl() );
+ DiskLoc left = A::make( id() );
+ root->push( biggestKey( 'm' ), left );
+ _count = 1;
+ A* l = A::is( left );
+ DiskLoc right = A::make( id() );
+ root->setNext( right );
+ A* r = A::is( right );
+ root->fixParentPtrs( dl() );
+
+ ASSERT_EQUALS( bigSize(), bigSize() / 2 * 2 );
+ fillToExactSize( l, leftSize(), 'a' );
+ fillToExactSize( r, rightSize(), 'n' );
+ ASSERT( leftAdditional() <= 2 );
+ if ( leftAdditional() >= 2 ) {
+ l->push( bigKey( 'k' ), DiskLoc() );
+ }
+ if ( leftAdditional() >= 1 ) {
+ l->push( bigKey( 'l' ), DiskLoc() );
+ }
+ ASSERT( rightAdditional() <= 2 );
+ if ( rightAdditional() >= 2 ) {
+ r->push( bigKey( 'y' ), DiskLoc() );
+ }
+ if ( rightAdditional() >= 1 ) {
+ r->push( bigKey( 'z' ), DiskLoc() );
+ }
+ _count += leftAdditional() + rightAdditional();
+
+// dump();
+
+ initCheck();
+ string ns = id().indexNamespace();
+ const char *keys = delKeys();
+ for( const char *i = keys; *i; ++i ) {
+ int unused = 0;
+ ASSERT_EQUALS( _count, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = bigKey( *i );
+ unindex( k );
+// dump();
+ --_count;
+ }
+
+// dump();
+
+ int unused = 0;
+ ASSERT_EQUALS( _count, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ validate();
+ if ( !merge() ) {
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ }
+ else {
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ }
+ }
+ protected:
+ virtual int leftAdditional() const { return 2; }
+ virtual int rightAdditional() const { return 2; }
+ virtual void initCheck() {}
+ virtual void validate() {}
+ virtual int leftSize() const = 0;
+ virtual int rightSize() const = 0;
+ virtual const char * delKeys() const { return "klyz"; }
+ virtual bool merge() const { return true; }
+ void fillToExactSize( ArtificialTree *t, int targetSize, char startKey ) {
+ int size = 0;
+ while( size < targetSize ) {
+ int space = targetSize - size;
+ int nextSize = space - sizeof( _KeyNode );
+ assert( nextSize > 0 );
+ BSONObj newKey = key( startKey++, nextSize );
+ t->push( newKey, DiskLoc() );
+ size += newKey.objsize() + sizeof( _KeyNode );
+ _count += 1;
+ }
+ ASSERT_EQUALS( t->packedDataSize( 0 ), targetSize );
+ }
+ static BSONObj key( char a, int size ) {
+ if ( size >= bigSize() ) {
+ return bigKey( a );
+ }
+ return simpleKey( a, size - ( bigSize() - 801 ) );
+ }
+ static BSONObj bigKey( char a ) {
+ return simpleKey( a, 801 );
+ }
+ static BSONObj biggestKey( char a ) {
+ int size = BtreeBucket::getKeyMax() - bigSize() + 801;
+ return simpleKey( a, size );
+ }
+ static int bigSize() {
+ return bigKey( 'a' ).objsize();
+ }
+ static int biggestSize() {
+ return biggestKey( 'a' ).objsize();
+ }
+ int _count;
+ };
+
+ class MergeSizeJustRightRight : public MergeSizeBase {
+ protected:
+ virtual int rightSize() const { return BtreeBucket::getLowWaterMark() - 1; }
+ virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) - ( BtreeBucket::getLowWaterMark() - 1 ); }
+ };
+
+ class MergeSizeJustRightLeft : public MergeSizeBase {
+ protected:
+ virtual int leftSize() const { return BtreeBucket::getLowWaterMark() - 1; }
+ virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) - ( BtreeBucket::getLowWaterMark() - 1 ); }
+ virtual const char * delKeys() const { return "yzkl"; }
+ };
+
+ class MergeSizeRight : public MergeSizeJustRightRight {
+ virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() - 1; }
+ virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
+ };
+
+ class MergeSizeLeft : public MergeSizeJustRightLeft {
+ virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
+ virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() - 1; }
+ };
+
+ class NoMergeBelowMarkRight : public MergeSizeJustRightRight {
+ virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() + 1; }
+ virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() - 1; }
+ virtual bool merge() const { return false; }
+ };
+
+ class NoMergeBelowMarkLeft : public MergeSizeJustRightLeft {
+ virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() - 1; }
+ virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() + 1; }
+ virtual bool merge() const { return false; }
+ };
+
+ class MergeSizeRightTooBig : public MergeSizeJustRightLeft {
+ virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
+ virtual bool merge() const { return false; }
+ };
+
+ class MergeSizeLeftTooBig : public MergeSizeJustRightRight {
+ virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
+ virtual bool merge() const { return false; }
+ };
+
+ class BalanceOneLeftToRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},b:{$20:null,$30:null,$40:null,$50:null,a:null},_:{c:null}}", id() );
+ ASSERT_EQUALS( 14, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x40 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},b:{$10:null,$20:null,$30:null,$50:null,a:null},_:{c:null}}", id() );
+ }
+ };
+
+ class BalanceOneRightToLeft : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null},b:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null},_:{c:null}}", id() );
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x3 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$20:{$1:null,$2:null,$4:null,$10:null},b:{$30:null,$40:null,$50:null,$60:null,$70:null},_:{c:null}}", id() );
+ }
+ };
+
+ class BalanceThreeLeftToRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$20:{$1:{$0:null},$3:{$2:null},$5:{$4:null},$7:{$6:null},$9:{$8:null},$11:{$10:null},$13:{$12:null},_:{$14:null}},b:{$30:null,$40:{$35:null},$50:{$45:null}},_:{c:null}}", id() );
+ ASSERT_EQUALS( 23, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 14, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x30 ) );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 22, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 14, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$9:{$1:{$0:null},$3:{$2:null},$5:{$4:null},$7:{$6:null},_:{$8:null}},b:{$11:{$10:null},$13:{$12:null},$20:{$14:null},$40:{$35:null},$50:{$45:null}},_:{c:null}}", id() );
+ }
+ };
+
+ class BalanceThreeRightToLeft : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$20:{$1:{$0:null},$3:{$2:null},$5:null,_:{$14:null}},b:{$30:{$25:null},$40:{$35:null},$50:{$45:null},$60:{$55:null},$70:{$65:null},$80:{$75:null},$90:{$85:null},$100:{$95:null}},_:{c:null}}", id() );
+ ASSERT_EQUALS( 25, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 15, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x5 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 24, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 15, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$50:{$1:{$0:null},$3:{$2:null},$20:{$14:null},$30:{$25:null},$40:{$35:null},_:{$45:null}},b:{$60:{$55:null},$70:{$65:null},$80:{$75:null},$90:{$85:null},$100:{$95:null}},_:{c:null}}", id() );
+ }
+ };
+
+ class BalanceSingleParentKey : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},_:{$20:null,$30:null,$40:null,$50:null,a:null}}", id() );
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x40 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$10:null,$20:null,$30:null,$50:null,a:null}}", id() );
+ }
+ };
+
+ class PackEmpty : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null}", id() );
+ BSONObj k = BSON( "" << "a" );
+ ASSERT( unindex( k ) );
+ ArtificialTree *t = ArtificialTree::is( dl() );
+ t->forcePack();
+ Tester::checkEmpty( t, id() );
+ }
+ class Tester : public ArtificialTree {
+ public:
+ static void checkEmpty( ArtificialTree *a, const IndexDetails &id ) {
+ Tester *t = static_cast< Tester * >( a );
+ ASSERT_EQUALS( 0, t->n );
+ ASSERT( !( t->flags & Packed ) );
+ Ordering o = Ordering::make( id.keyPattern() );
+ int zero = 0;
+ t->_packReadyForMod( o, zero );
+ ASSERT_EQUALS( 0, t->n );
+ ASSERT_EQUALS( 0, t->topSize );
+ ASSERT_EQUALS( BtreeBucket::bodySize(), t->emptySize );
+ ASSERT( t->flags & Packed );
+ }
+ };
+ };
+
+ class PackedDataSizeEmpty : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null}", id() );
+ BSONObj k = BSON( "" << "a" );
+ ASSERT( unindex( k ) );
+ ArtificialTree *t = ArtificialTree::is( dl() );
+ t->forcePack();
+ Tester::checkEmpty( t, id() );
+ }
+ class Tester : public ArtificialTree {
+ public:
+ static void checkEmpty( ArtificialTree *a, const IndexDetails &id ) {
+ Tester *t = static_cast< Tester * >( a );
+ ASSERT_EQUALS( 0, t->n );
+ ASSERT( !( t->flags & Packed ) );
+ int zero = 0;
+ ASSERT_EQUALS( 0, t->packedDataSize( zero ) );
+ ASSERT( !( t->flags & Packed ) );
+ }
+ };
+ };
+
+ class BalanceSingleParentKeyPackParent : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},_:{$20:null,$30:null,$40:null,$50:null,a:null}}", id() );
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ // force parent pack
+ ArtificialTree::is( dl() )->forcePack();
+ BSONObj k = BSON( "" << bigNumString( 0x40 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$10:null,$20:null,$30:null,$50:null,a:null}}", id() );
+ }
+ };
+
+ class BalanceSplitParent : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10$10:{$1:null,$2:null,$3:null,$4:null},$100:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null},$200:null,$300:null,$400:null,$500:null,$600:null,$700:null,$800:null,$900:null,_:{c:null}}", id() );
+ ASSERT_EQUALS( 22, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x3 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 21, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$500:{$30:{$1:null,$2:null,$4:null,$10$10:null,$20:null},$100:{$40:null,$50:null,$60:null,$70:null,$80:null},$200:null,$300:null,$400:null},_:{$600:null,$700:null,$800:null,$900:null,_:{c:null}}}", id() );
+ }
+ };
+
+ class RebalancedSeparatorBase : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( treeSpec(), id() );
+ modTree();
+ Tester::checkSeparator( id(), expectedSeparator() );
+ }
+ virtual string treeSpec() const = 0;
+ virtual int expectedSeparator() const = 0;
+ virtual void modTree() {}
+ struct Tester : public ArtificialTree {
+ static void checkSeparator( const IndexDetails& id, int expected ) {
+ ASSERT_EQUALS( expected, static_cast< Tester * >( id.head.btreemod() )->rebalancedSeparatorPos( id.head, 0 ) );
+ }
+ };
+ };
+
+ class EvenRebalanceLeft : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$7:{$1:null,$2$31f:null,$3:null,$4$31f:null,$5:null,$6:null},_:{$8:null,$9:null,$10$31e:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class EvenRebalanceLeftCusp : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$6:{$1:null,$2$31f:null,$3:null,$4$31f:null,$5:null},_:{$7:null,$8:null,$9$31e:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class EvenRebalanceRight : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$3:{$1:null,$2$31f:null},_:{$4$31f:null,$5:null,$6:null,$7:null,$8$31e:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class EvenRebalanceRightCusp : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$4$31f:{$1:null,$2$31f:null,$3:null},_:{$5:null,$6:null,$7$31e:null,$8:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class EvenRebalanceCenter : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$5:{$1:null,$2$31f:null,$3:null,$4$31f:null},_:{$6:null,$7$31e:null,$8:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class OddRebalanceLeft : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$6$31f:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$7:null,$8:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class OddRebalanceRight : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$4:{$1:null,$2:null,$3:null},_:{$5:null,$6:null,$7:null,$8$31f:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class OddRebalanceCenter : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$5:{$1:null,$2:null,$3:null,$4:null},_:{$6:null,$7:null,$8:null,$9:null,$10$31f:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class RebalanceEmptyRight : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$a:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null,$7:null,$8:null,$9:null},_:{$b:null}}"; }
+ virtual void modTree() {
+ BSONObj k = BSON( "" << bigNumString( 0xb ) );
+ ASSERT( unindex( k ) );
+ }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class RebalanceEmptyLeft : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$a:{$1:null},_:{$11:null,$12:null,$13:null,$14:null,$15:null,$16:null,$17:null,$18:null,$19:null}}"; }
+ virtual void modTree() {
+ BSONObj k = BSON( "" << bigNumString( 0x1 ) );
+ ASSERT( unindex( k ) );
+ }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class NoMoveAtLowWaterMarkRight : public MergeSizeJustRightRight {
+ virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() + 1; }
+ virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key; }
+ virtual void validate() { ASSERT_EQUALS( _oldTop, bt()->keyNode( 0 ).key ); }
+ virtual bool merge() const { return false; }
+ protected:
+ BSONObj _oldTop;
+ };
+
+ class MoveBelowLowWaterMarkRight : public NoMoveAtLowWaterMarkRight {
+ virtual int rightSize() const { return MergeSizeJustRightRight::rightSize(); }
+ virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
+ // different top means we rebalanced
+ virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key ) ); }
+ };
+
+ class NoMoveAtLowWaterMarkLeft : public MergeSizeJustRightLeft {
+ virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() + 1; }
+ virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key; }
+ virtual void validate() { ASSERT_EQUALS( _oldTop, bt()->keyNode( 0 ).key ); }
+ virtual bool merge() const { return false; }
+ protected:
+ BSONObj _oldTop;
+ };
+
+ class MoveBelowLowWaterMarkLeft : public NoMoveAtLowWaterMarkLeft {
+ virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize(); }
+ virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
+ // different top means we rebalanced
+ virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key ) ); }
+ };
+
+ class PreferBalanceLeft : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},$20:{$11:null,$12:null,$13:null,$14:null},_:{$30:null}}", id() );
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x12 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$5:{$1:null,$2:null,$3:null,$4:null},$20:{$6:null,$10:null,$11:null,$13:null,$14:null},_:{$30:null}}", id() );
+ }
+ };
+
+ class PreferBalanceRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null},$20:{$11:null,$12:null,$13:null,$14:null},_:{$31:null,$32:null,$33:null,$34:null,$35:null,$36:null}}", id() );
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x12 ) );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$10:{$1:null},$31:{$11:null,$13:null,$14:null,$20:null},_:{$32:null,$33:null,$34:null,$35:null,$36:null}}", id() );
+ }
+ };
+
+ class RecursiveMergeThenBalance : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$5:{$1:null,$2:null},$8:{$6:null,$7:null}},_:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null,$90:null}}", id() );
+ ASSERT_EQUALS( 15, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x7 ) );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 14, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$40:{$8:{$1:null,$2:null,$5:null,$6:null},$10:null,$20:null,$30:null},_:{$50:null,$60:null,$70:null,$80:null,$90:null}}", id() );
+ }
+ };
+
+ class MergeRightEmpty : public MergeSizeBase {
+ protected:
+ virtual int rightAdditional() const { return 1; }
+ virtual int leftAdditional() const { return 1; }
+ virtual const char * delKeys() const { return "lz"; }
+ virtual int rightSize() const { return 0; }
+ virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ); }
+ };
+
+ class MergeMinRightEmpty : public MergeSizeBase {
+ protected:
+ virtual int rightAdditional() const { return 1; }
+ virtual int leftAdditional() const { return 0; }
+ virtual const char * delKeys() const { return "z"; }
+ virtual int rightSize() const { return 0; }
+ virtual int leftSize() const { return bigSize() + sizeof( _KeyNode ); }
+ };
+
+ class MergeLeftEmpty : public MergeSizeBase {
+ protected:
+ virtual int rightAdditional() const { return 1; }
+ virtual int leftAdditional() const { return 1; }
+ virtual const char * delKeys() const { return "zl"; }
+ virtual int leftSize() const { return 0; }
+ virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ); }
+ };
+
+ class MergeMinLeftEmpty : public MergeSizeBase {
+ protected:
+ virtual int leftAdditional() const { return 1; }
+ virtual int rightAdditional() const { return 0; }
+ virtual const char * delKeys() const { return "l"; }
+ virtual int leftSize() const { return 0; }
+ virtual int rightSize() const { return bigSize() + sizeof( _KeyNode ); }
+ };
+
+ class BalanceRightEmpty : public MergeRightEmpty {
+ protected:
+ virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) + 1; }
+ virtual bool merge() const { return false; }
+ virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key; }
+ virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key ) ); }
+ private:
+ BSONObj _oldTop;
+ };
+
+ class BalanceLeftEmpty : public MergeLeftEmpty {
+ protected:
+ virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) + 1; }
+ virtual bool merge() const { return false; }
+ virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key; }
+ virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key ) ); }
+ private:
+ BSONObj _oldTop;
+ };
+
+ class DelEmptyNoNeighbors : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{b:{a:null}}", id() );
+ ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "a" );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 1, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{b:null}", id() );
+ }
+ };
+
+ class DelEmptyEmptyNeighbors : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "b" );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,c:null,d:null}", id() );
+ }
+ };
+
+ class DelInternal : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
+ int unused = 0;
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "c" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,b:null,d:null}", id() );
+ }
+ };
+
+ class DelInternalReplaceWithUnused : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
+ getDur().writingInt( const_cast< DiskLoc& >( bt()->keyNode( 1 ).prevChildBucket.btree()->keyNode( 0 ).recordLoc ).GETOFS() ) |= 1; // make unused
+ int unused = 0;
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 1, unused );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "c" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ unused = 0;
+ ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 1, unused );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ // doesn't discriminate between used and unused
+ ArtificialTree::checkStructure( "{a:null,b:null,d:null}", id() );
+ }
+ };
+
+ class DelInternalReplaceRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,_:{b:null}}", id() );
+ int unused = 0;
+ ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "a" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ unused = 0;
+ ASSERT_EQUALS( 1, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{b:null}", id() );
+ }
+ };
+
+ class DelInternalPromoteKey : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,y:{d:{c:{b:null}},_:{e:null}},z:null}", id() );
+ int unused = 0;
+ ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "y" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ unused = 0;
+ ASSERT_EQUALS( 6, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,e:{c:{b:null},d:null},z:null}", id() );
+ }
+ };
+
+ class DelInternalPromoteRightKey : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,_:{e:{c:null},_:{f:null}}}", id() );
+ int unused = 0;
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "a" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ unused = 0;
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{c:null,_:{e:null,f:null}}", id() );
+ }
+ };
+
+ class DelInternalReplacementPrevNonNull : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,d:{c:{b:null}},e:null}", id() );
+ int unused = 0;
+ ASSERT_EQUALS( 5, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "d" );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 1, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,d:{c:{b:null}},e:null}", id() );
+ ASSERT( bt()->keyNode( 1 ).recordLoc.getOfs() & 1 ); // check 'unused' key
+ }
+ };
+
+ class DelInternalReplacementNextNonNull : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,_:{c:null,_:{d:null}}}", id() );
+ int unused = 0;
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "a" );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 1, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,_:{c:null,_:{d:null}}}", id() );
+ ASSERT( bt()->keyNode( 0 ).recordLoc.getOfs() & 1 ); // check 'unused' key
+ }
+ };
+
+ class DelInternalSplitPromoteLeft : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:null,$20:null,$30$10:{$25:{$23:null},_:{$27:null}},$40:null,$50:null,$60:null,$70:null,$80:null,$90:null,$100:null}", id() );
+ int unused = 0;
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x30, 0x10 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$60:{$10:null,$20:null,$27:{$23:null,$25:null},$40:null,$50:null},_:{$70:null,$80:null,$90:null,$100:null}}", id() );
+ }
+ };
+
+ class DelInternalSplitPromoteRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:null,$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null,$90:null,$100$10:{$95:{$93:null},_:{$97:null}}}", id() );
+ int unused = 0;
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x100, 0x10 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$80:{$10:null,$20:null,$30:null,$40:null,$50:null,$60:null,$70:null},_:{$90:null,$97:{$93:null,$95:null}}}", id() );
+ }
+ };
+
class All : public Suite {
public:
- All() : Suite( "btree" ){
+ All() : Suite( "btree" ) {
}
-
- void setupTests(){
+
+ void setupTests() {
add< Create >();
add< SimpleInsertDelete >();
add< SplitRightHeavyBucket >();
@@ -380,9 +1634,77 @@ namespace BtreeTests {
add< MissingLocate >();
add< MissingLocateMultiBucket >();
add< SERVER983 >();
- add< ReuseUnused >();
+ add< DontReuseUnused >();
add< PackUnused >();
add< DontDropReferenceKey >();
+ add< MergeBucketsLeft >();
+ add< MergeBucketsRight >();
+// add< MergeBucketsHead >();
+ add< MergeBucketsDontReplaceHead >();
+ add< MergeBucketsDelInternal >();
+ add< MergeBucketsRightNull >();
+ add< DontMergeSingleBucket >();
+ add< ParentMergeNonRightToLeft >();
+ add< ParentMergeNonRightToRight >();
+ add< CantMergeRightNoMerge >();
+ add< CantMergeLeftNoMerge >();
+ add< MergeOption >();
+ add< ForceMergeLeft >();
+ add< ForceMergeRight >();
+ add< RecursiveMerge >();
+ add< RecursiveMergeRightBucket >();
+ add< RecursiveMergeDoubleRightBucket >();
+ add< MergeSizeJustRightRight >();
+ add< MergeSizeJustRightLeft >();
+ add< MergeSizeRight >();
+ add< MergeSizeLeft >();
+ add< NoMergeBelowMarkRight >();
+ add< NoMergeBelowMarkLeft >();
+ add< MergeSizeRightTooBig >();
+ add< MergeSizeLeftTooBig >();
+ add< BalanceOneLeftToRight >();
+ add< BalanceOneRightToLeft >();
+ add< BalanceThreeLeftToRight >();
+ add< BalanceThreeRightToLeft >();
+ add< BalanceSingleParentKey >();
+ add< PackEmpty >();
+ add< PackedDataSizeEmpty >();
+ add< BalanceSingleParentKeyPackParent >();
+ add< BalanceSplitParent >();
+ add< EvenRebalanceLeft >();
+ add< EvenRebalanceLeftCusp >();
+ add< EvenRebalanceRight >();
+ add< EvenRebalanceRightCusp >();
+ add< EvenRebalanceCenter >();
+ add< OddRebalanceLeft >();
+ add< OddRebalanceRight >();
+ add< OddRebalanceCenter >();
+ add< RebalanceEmptyRight >();
+ add< RebalanceEmptyLeft >();
+ add< NoMoveAtLowWaterMarkRight >();
+ add< MoveBelowLowWaterMarkRight >();
+ add< NoMoveAtLowWaterMarkLeft >();
+ add< MoveBelowLowWaterMarkLeft >();
+ add< PreferBalanceLeft >();
+ add< PreferBalanceRight >();
+ add< RecursiveMergeThenBalance >();
+ add< MergeRightEmpty >();
+ add< MergeMinRightEmpty >();
+ add< MergeLeftEmpty >();
+ add< MergeMinLeftEmpty >();
+ add< BalanceRightEmpty >();
+ add< BalanceLeftEmpty >();
+ add< DelEmptyNoNeighbors >();
+ add< DelEmptyEmptyNeighbors >();
+ add< DelInternal >();
+ add< DelInternalReplaceWithUnused >();
+ add< DelInternalReplaceRight >();
+ add< DelInternalPromoteKey >();
+ add< DelInternalPromoteRightKey >();
+ add< DelInternalReplacementPrevNonNull >();
+ add< DelInternalReplacementNextNonNull >();
+ add< DelInternalSplitPromoteLeft >();
+ add< DelInternalSplitPromoteRight >();
}
} myall;
}
diff --git a/dbtests/clienttests.cpp b/dbtests/clienttests.cpp
index 58287e9..f51b765 100644
--- a/dbtests/clienttests.cpp
+++ b/dbtests/clienttests.cpp
@@ -20,40 +20,40 @@
#include "../client/dbclient.h"
#include "dbtests.h"
#include "../db/concurrency.h"
-
+
namespace ClientTests {
-
+
class Base {
public:
-
- Base( string coll ){
+
+ Base( string coll ) {
_ns = (string)"test." + coll;
}
-
- virtual ~Base(){
+
+ virtual ~Base() {
db.dropCollection( _ns );
}
-
- const char * ns(){ return _ns.c_str(); }
-
+
+ const char * ns() { return _ns.c_str(); }
+
string _ns;
DBDirectClient db;
};
-
+
class DropIndex : public Base {
public:
- DropIndex() : Base( "dropindex" ){}
- void run(){
+ DropIndex() : Base( "dropindex" ) {}
+ void run() {
db.insert( ns() , BSON( "x" << 2 ) );
ASSERT_EQUALS( 1 , db.getIndexes( ns() )->itcount() );
-
+
db.ensureIndex( ns() , BSON( "x" << 1 ) );
ASSERT_EQUALS( 2 , db.getIndexes( ns() )->itcount() );
-
+
db.dropIndex( ns() , BSON( "x" << 1 ) );
ASSERT_EQUALS( 1 , db.getIndexes( ns() )->itcount() );
-
+
db.ensureIndex( ns() , BSON( "x" << 1 ) );
ASSERT_EQUALS( 2 , db.getIndexes( ns() )->itcount() );
@@ -61,18 +61,18 @@ namespace ClientTests {
ASSERT_EQUALS( 1 , db.getIndexes( ns() )->itcount() );
}
};
-
+
class ReIndex : public Base {
public:
- ReIndex() : Base( "reindex" ){}
- void run(){
-
+ ReIndex() : Base( "reindex" ) {}
+ void run() {
+
db.insert( ns() , BSON( "x" << 2 ) );
ASSERT_EQUALS( 1 , db.getIndexes( ns() )->itcount() );
-
+
db.ensureIndex( ns() , BSON( "x" << 1 ) );
ASSERT_EQUALS( 2 , db.getIndexes( ns() )->itcount() );
-
+
db.reIndex( ns() );
ASSERT_EQUALS( 2 , db.getIndexes( ns() )->itcount() );
}
@@ -81,15 +81,15 @@ namespace ClientTests {
class ReIndex2 : public Base {
public:
- ReIndex2() : Base( "reindex2" ){}
- void run(){
-
+ ReIndex2() : Base( "reindex2" ) {}
+ void run() {
+
db.insert( ns() , BSON( "x" << 2 ) );
ASSERT_EQUALS( 1 , db.getIndexes( ns() )->itcount() );
-
+
db.ensureIndex( ns() , BSON( "x" << 1 ) );
ASSERT_EQUALS( 2 , db.getIndexes( ns() )->itcount() );
-
+
BSONObj out;
ASSERT( db.runCommand( "test" , BSON( "reIndex" << "reindex2" ) , out ) );
ASSERT_EQUALS( 2 , out["nIndexes"].number() );
@@ -106,7 +106,7 @@ namespace ClientTests {
for( int i = 0; i < 1111; ++i )
db.insert( ns(), BSON( "a" << i << "b" << longs ) );
db.ensureIndex( ns(), BSON( "a" << 1 << "b" << 1 ) );
-
+
auto_ptr< DBClientCursor > c = db.query( ns(), Query().sort( BSON( "a" << 1 << "b" << 1 ) ) );
ASSERT_EQUALS( 1111, c->itcount() );
}
@@ -161,20 +161,37 @@ namespace ClientTests {
ASSERT( db.runCommand( "unittests", BSON( "collstats" << "clienttests.create" ), info ) );
}
};
+
+ class ConnectionStringTests {
+ public:
+ void run() {
+ {
+ ConnectionString s( "a/b,c,d" , ConnectionString::SET );
+ ASSERT_EQUALS( ConnectionString::SET , s.type() );
+ ASSERT_EQUALS( "a" , s.getSetName() );
+ vector<HostAndPort> v = s.getServers();
+ ASSERT_EQUALS( 3U , v.size() );
+ ASSERT_EQUALS( "b" , v[0].host() );
+ ASSERT_EQUALS( "c" , v[1].host() );
+ ASSERT_EQUALS( "d" , v[2].host() );
+ }
+ }
+ };
class All : public Suite {
public:
- All() : Suite( "client" ){
+ All() : Suite( "client" ) {
}
- void setupTests(){
+ void setupTests() {
add<DropIndex>();
add<ReIndex>();
add<ReIndex2>();
add<CS_10>();
add<PushBack>();
add<Create>();
+ add<ConnectionStringTests>();
}
-
+
} all;
}
diff --git a/dbtests/commandtests.cpp b/dbtests/commandtests.cpp
index fa0014d..fa6204d 100644
--- a/dbtests/commandtests.cpp
+++ b/dbtests/commandtests.cpp
@@ -23,19 +23,19 @@ using namespace mongo;
namespace CommandTests {
// one namespace per command
- namespace FileMD5{
+ namespace FileMD5 {
struct Base {
- Base(){
+ Base() {
db.dropCollection(ns());
db.ensureIndex(ns(), BSON( "files_id" << 1 << "n" << 1 ));
}
const char* ns() { return "test.fs.chunks"; }
-
+
DBDirectClient db;
};
struct Type0 : Base {
- void run(){
+ void run() {
{
BSONObjBuilder b;
b.genOID();
@@ -58,8 +58,8 @@ namespace CommandTests {
ASSERT_EQUALS( string("5eb63bbbe01eeed093cb22bb8f5acdc3") , result["md5"].valuestr() );
}
};
- struct Type2 : Base{
- void run(){
+ struct Type2 : Base {
+ void run() {
{
BSONObjBuilder b;
b.genOID();
@@ -86,13 +86,13 @@ namespace CommandTests {
class All : public Suite {
public:
- All() : Suite( "commands" ){
+ All() : Suite( "commands" ) {
}
- void setupTests(){
+ void setupTests() {
add< FileMD5::Type0 >();
add< FileMD5::Type2 >();
}
-
+
} all;
}
diff --git a/dbtests/cursortests.cpp b/dbtests/cursortests.cpp
index 954c8b0..ddd7b03 100644
--- a/dbtests/cursortests.cpp
+++ b/dbtests/cursortests.cpp
@@ -25,12 +25,12 @@
#include "dbtests.h"
namespace CursorTests {
-
+
namespace BtreeCursorTests {
// The ranges expressed in these tests are impossible given our query
// syntax, so going to do them a hacky way.
-
+
class Base {
protected:
FieldRangeVector *vec( int *vals, int len, int direction = 1 ) {
@@ -40,7 +40,8 @@ namespace CursorTests {
FieldRangeSet s2( "", _objs.back() );
if ( i == 0 ) {
s.range( "a" ) = s2.range( "a" );
- } else {
+ }
+ else {
s.range( "a" ) |= s2.range( "a" );
}
}
@@ -49,7 +50,7 @@ namespace CursorTests {
private:
vector< BSONObj > _objs;
};
-
+
class MultiRange : public Base {
public:
void run() {
@@ -103,7 +104,7 @@ namespace CursorTests {
ASSERT( !c.ok() );
}
};
-
+
class MultiRangeReverse : public Base {
public:
void run() {
@@ -129,7 +130,7 @@ namespace CursorTests {
ASSERT( !c.ok() );
}
};
-
+
class Base2 {
public:
virtual ~Base2() { _c.dropCollection( ns() ); }
@@ -167,7 +168,7 @@ namespace CursorTests {
dblock _lk;
vector< BSONObj > _objs;
};
-
+
class EqEq : public Base2 {
public:
void run() {
@@ -194,7 +195,7 @@ namespace CursorTests {
check( BSON( "a" << 4 << "b" << BSON( "$gte" << 1 << "$lte" << 10 ) ) );
}
virtual BSONObj idx() const { return BSON( "a" << 1 << "b" << 1 ); }
- };
+ };
class EqIn : public Base2 {
public:
@@ -210,7 +211,7 @@ namespace CursorTests {
check( BSON( "a" << 4 << "b" << BSON( "$in" << BSON_ARRAY( 5 << 6 << 11 ) ) ) );
}
virtual BSONObj idx() const { return BSON( "a" << 1 << "b" << 1 ); }
- };
+ };
class RangeEq : public Base2 {
public:
@@ -227,7 +228,7 @@ namespace CursorTests {
check( BSON( "a" << BSON( "$gte" << 1 << "$lte" << 10 ) << "b" << 4 ) );
}
virtual BSONObj idx() const { return BSON( "a" << 1 << "b" << 1 ); }
- };
+ };
class RangeIn : public Base2 {
public:
@@ -244,15 +245,15 @@ namespace CursorTests {
check( BSON( "a" << BSON( "$gte" << 1 << "$lte" << 10 ) << "b" << BSON( "$in" << BSON_ARRAY( 4 << 6 ) ) ) );
}
virtual BSONObj idx() const { return BSON( "a" << 1 << "b" << 1 ); }
- };
-
+ };
+
} // namespace BtreeCursorTests
-
+
class All : public Suite {
public:
- All() : Suite( "cursor" ){}
-
- void setupTests(){
+ All() : Suite( "cursor" ) {}
+
+ void setupTests() {
add< BtreeCursorTests::MultiRange >();
add< BtreeCursorTests::MultiRangeGap >();
add< BtreeCursorTests::MultiRangeReverse >();
diff --git a/dbtests/d_chunk_manager_tests.cpp b/dbtests/d_chunk_manager_tests.cpp
new file mode 100644
index 0000000..bcfe9fa
--- /dev/null
+++ b/dbtests/d_chunk_manager_tests.cpp
@@ -0,0 +1,467 @@
+//@file d_chunk_manager_tests.cpp : s/d_chunk_manager.{h,cpp} tests
+
+/**
+* Copyright (C) 2010 10gen Inc.
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU Affero General Public License, version 3,
+* as published by the Free Software Foundation.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU Affero General Public License for more details.
+*
+* You should have received a copy of the GNU Affero General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#include "pch.h"
+#include "dbtests.h"
+
+#include "../s/d_chunk_manager.h"
+
+namespace {
+
+ class BasicTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 ) <<
+ "unique" << false );
+
+ // single-chunk collection
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "test.foo-a_MinKey" <<
+ "ns" << "test.foo" <<
+ "min" << BSON( "a" << MINKEY ) <<
+ "max" << BSON( "a" << MAXKEY ) ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ BSONObj k1 = BSON( "a" << MINKEY );
+ ASSERT( s.belongsToMe( k1 ) );
+ BSONObj k2 = BSON( "a" << MAXKEY );
+ ASSERT( ! s.belongsToMe( k2 ) );
+ BSONObj k3 = BSON( "a" << 1 << "b" << 2 );
+ ASSERT( s.belongsToMe( k3 ) );
+ }
+ };
+
+ class BasicCompoundTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 << "b" << 1) <<
+ "unique" << false );
+
+ // single-chunk collection
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "test.foo-a_MinKeyb_MinKey" <<
+ "ns" << "test.foo" <<
+ "min" << BSON( "a" << MINKEY << "b" << MINKEY ) <<
+ "max" << BSON( "a" << MAXKEY << "b" << MAXKEY ) ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ BSONObj k1 = BSON( "a" << MINKEY << "b" << MINKEY );
+ ASSERT( s.belongsToMe( k1 ) );
+ BSONObj k2 = BSON( "a" << MAXKEY << "b" << MAXKEY );
+ ASSERT( ! s.belongsToMe( k2 ) );
+ BSONObj k3 = BSON( "a" << MINKEY << "b" << 10 );
+ ASSERT( s.belongsToMe( k3 ) );
+ BSONObj k4 = BSON( "a" << 10 << "b" << 20 );
+ ASSERT( s.belongsToMe( k4 ) );
+ }
+ };
+
+ class RangeTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "x.y" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 ) <<
+ "unique" << false );
+
+ // 3-chunk collection, 2 of them being contiguous
+ // [min->10) , [10->20) , <gap> , [30->max)
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "x.y-a_MinKey" <<
+ "ns" << "x.y" <<
+ "min" << BSON( "a" << MINKEY ) <<
+ "max" << BSON( "a" << 10 ) ) <<
+ BSON( "_id" << "x.y-a_10" <<
+ "ns" << "x.y" <<
+ "min" << BSON( "a" << 10 ) <<
+ "max" << BSON( "a" << 20 ) ) <<
+ BSON( "_id" << "x.y-a_30" <<
+ "ns" << "x.y" <<
+ "min" << BSON( "a" << 30 ) <<
+ "max" << BSON( "a" << MAXKEY ) ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ BSONObj k1 = BSON( "a" << 5 );
+ ASSERT( s.belongsToMe( k1 ) );
+ BSONObj k2 = BSON( "a" << 10 );
+ ASSERT( s.belongsToMe( k2 ) );
+ BSONObj k3 = BSON( "a" << 25 );
+ ASSERT( ! s.belongsToMe( k3 ) );
+ BSONObj k4 = BSON( "a" << 30 );
+ ASSERT( s.belongsToMe( k4 ) );
+ BSONObj k5 = BSON( "a" << 40 );
+ ASSERT( s.belongsToMe( k5 ) );
+ }
+ };
+
+ class GetNextTests {
+ public:
+ void run() {
+
+ BSONObj collection = BSON( "_id" << "x.y" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 ) <<
+ "unique" << false );
+ // empty collection
+ BSONArray chunks1 = BSONArray();
+ ShardChunkManager s1( collection , chunks1 );
+
+ BSONObj empty;
+ BSONObj arbitraryKey = BSON( "a" << 10 );
+ BSONObj foundMin, foundMax;
+
+ ASSERT( s1.getNextChunk( empty , &foundMin , &foundMax ) );
+ ASSERT( foundMin.isEmpty() );
+ ASSERT( foundMax.isEmpty() );
+ ASSERT( s1.getNextChunk( arbitraryKey , &foundMin , &foundMax ) );
+ ASSERT( foundMin.isEmpty() );
+ ASSERT( foundMax.isEmpty() );
+
+ // single-chunk collection
+ // [10->20]
+ BSONObj key_a10 = BSON( "a" << 10 );
+ BSONObj key_a20 = BSON( "a" << 20 );
+ BSONArray chunks2 = BSON_ARRAY( BSON( "_id" << "x.y-a_10" <<
+ "ns" << "x.y" <<
+ "min" << key_a10 <<
+ "max" << key_a20 ) );
+ ShardChunkManager s2( collection , chunks2 );
+ ASSERT( s2.getNextChunk( empty , &foundMin , &foundMax ) );
+ ASSERT( foundMin.woCompare( key_a10 ) == 0 );
+ ASSERT( foundMax.woCompare( key_a20 ) == 0 );
+
+ // 3-chunk collection, 2 of them being contiguous
+ // [min->10) , [10->20) , <gap> , [30->max)
+ BSONObj key_a30 = BSON( "a" << 30 );
+ BSONObj key_min = BSON( "a" << MINKEY );
+ BSONObj key_max = BSON( "a" << MAXKEY );
+ BSONArray chunks3 = BSON_ARRAY( BSON( "_id" << "x.y-a_MinKey" <<
+ "ns" << "x.y" <<
+ "min" << key_min <<
+ "max" << key_a10 ) <<
+ BSON( "_id" << "x.y-a_10" <<
+ "ns" << "x.y" <<
+ "min" << key_a10 <<
+ "max" << key_a20 ) <<
+ BSON( "_id" << "x.y-a_30" <<
+ "ns" << "x.y" <<
+ "min" << key_a30 <<
+ "max" << key_max ) );
+ ShardChunkManager s3( collection , chunks3 );
+ ASSERT( ! s3.getNextChunk( empty , &foundMin , &foundMax ) ); // not eof
+ ASSERT( foundMin.woCompare( key_min ) == 0 );
+ ASSERT( foundMax.woCompare( key_a10 ) == 0 );
+ ASSERT( ! s3.getNextChunk( key_a10 , &foundMin , &foundMax ) );
+ ASSERT( foundMin.woCompare( key_a30 ) == 0 );
+ ASSERT( foundMax.woCompare( key_max ) == 0 );
+ ASSERT( s3.getNextChunk( key_a30 , &foundMin , &foundMax ) );
+ }
+ };
+
+ class DeletedTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << "true" );
+
+ BSONArray chunks = BSONArray();
+
+ ASSERT_EXCEPTION( ShardChunkManager s ( collection , chunks ) , UserException );
+ }
+ };
+
+ class ClonePlusTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 << "b" << 1 ) <<
+ "unique" << false );
+ // 1-chunk collection
+ // [10,0-20,0)
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "test.foo-a_MinKey" <<
+ "ns" << "test.foo" <<
+ "min" << BSON( "a" << 10 << "b" << 0 ) <<
+ "max" << BSON( "a" << 20 << "b" << 0 ) ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ // new chunk [20,0-30,0)
+ BSONObj min = BSON( "a" << 20 << "b" << 0 );
+ BSONObj max = BSON( "a" << 30 << "b" << 0 );
+ ShardChunkManagerPtr cloned( s.clonePlus( min , max , 1 /* TODO test version */ ) );
+
+ BSONObj k1 = BSON( "a" << 5 << "b" << 0 );
+ ASSERT( ! cloned->belongsToMe( k1 ) );
+ BSONObj k2 = BSON( "a" << 20 << "b" << 0 );
+ ASSERT( cloned->belongsToMe( k2 ) );
+ BSONObj k3 = BSON( "a" << 25 << "b" << 0 );
+ ASSERT( cloned->belongsToMe( k3 ) );
+ BSONObj k4 = BSON( "a" << 30 << "b" << 0 );
+ ASSERT( ! cloned->belongsToMe( k4 ) );
+ }
+ };
+
+ class ClonePlusExceptionTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 << "b" << 1 ) <<
+ "unique" << false );
+ // 1-chunk collection
+ // [10,0-20,0)
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "test.foo-a_MinKey" <<
+ "ns" << "test.foo" <<
+ "min" << BSON( "a" << 10 << "b" << 0 ) <<
+ "max" << BSON( "a" << 20 << "b" << 0 ) ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ // [15,0-25,0) overlaps [10,0-20,0)
+ BSONObj min = BSON( "a" << 15 << "b" << 0 );
+ BSONObj max = BSON( "a" << 25 << "b" << 0 );
+ ASSERT_EXCEPTION( s.clonePlus ( min , max , 1 /* TODO test version */ ) , UserException );
+ }
+ };
+
+ class CloneMinusTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "x.y" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 << "b" << 1 ) <<
+ "unique" << false );
+
+ // 2-chunk collection
+ // [10,0->20,0) , <gap> , [30,0->40,0)
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "x.y-a_10b_0" <<
+ "ns" << "x.y" <<
+ "min" << BSON( "a" << 10 << "b" << 0 ) <<
+ "max" << BSON( "a" << 20 << "b" << 0 ) ) <<
+ BSON( "_id" << "x.y-a_30b_0" <<
+ "ns" << "x.y" <<
+ "min" << BSON( "a" << 30 << "b" << 0 ) <<
+ "max" << BSON( "a" << 40 << "b" << 0 ) ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ // deleting chunk [10,0-20,0)
+ BSONObj min = BSON( "a" << 10 << "b" << 0 );
+ BSONObj max = BSON( "a" << 20 << "b" << 0 );
+ ShardChunkManagerPtr cloned( s.cloneMinus( min , max , 1 /* TODO test version */ ) );
+
+ BSONObj k1 = BSON( "a" << 5 << "b" << 0 );
+ ASSERT( ! cloned->belongsToMe( k1 ) );
+ BSONObj k2 = BSON( "a" << 15 << "b" << 0 );
+ ASSERT( ! cloned->belongsToMe( k2 ) );
+ BSONObj k3 = BSON( "a" << 30 << "b" << 0 );
+ ASSERT( cloned->belongsToMe( k3 ) );
+ BSONObj k4 = BSON( "a" << 35 << "b" << 0 );
+ ASSERT( cloned->belongsToMe( k4 ) );
+ BSONObj k5 = BSON( "a" << 40 << "b" << 0 );
+ ASSERT( ! cloned->belongsToMe( k5 ) );
+ }
+ };
+
+ class CloneMinusExceptionTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "x.y" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 << "b" << 1 ) <<
+ "unique" << false );
+
+ // 2-chunk collection
+ // [10,0->20,0) , <gap> , [30,0->40,0)
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "x.y-a_10b_0" <<
+ "ns" << "x.y" <<
+ "min" << BSON( "a" << 10 << "b" << 0 ) <<
+ "max" << BSON( "a" << 20 << "b" << 0 ) ) <<
+ BSON( "_id" << "x.y-a_30b_0" <<
+ "ns" << "x.y" <<
+ "min" << BSON( "a" << 30 << "b" << 0 ) <<
+ "max" << BSON( "a" << 40 << "b" << 0 ) ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ // deleting non-existing chunk [25,0-28,0)
+ BSONObj min1 = BSON( "a" << 25 << "b" << 0 );
+ BSONObj max1 = BSON( "a" << 28 << "b" << 0 );
+ ASSERT_EXCEPTION( s.cloneMinus( min1 , max1 , 1 /* TODO test version */ ) , UserException );
+
+
+            // deleting an overlapping range (not exactly a chunk) [15,0-25,0)
+ BSONObj min2 = BSON( "a" << 15 << "b" << 0 );
+ BSONObj max2 = BSON( "a" << 25 << "b" << 0 );
+ ASSERT_EXCEPTION( s.cloneMinus( min2 , max2 , 1 /* TODO test version */ ) , UserException );
+ }
+ };
+
+ class CloneSplitTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 << "b" << 1 ) <<
+ "unique" << false );
+ // 1-chunk collection
+ // [10,0-20,0)
+ BSONObj min = BSON( "a" << 10 << "b" << 0 );
+ BSONObj max = BSON( "a" << 20 << "b" << 0 );
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "test.foo-a_MinKey"
+ << "ns" << "test.foo"
+ << "min" << min
+ << "max" << max ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ BSONObj split1 = BSON( "a" << 15 << "b" << 0 );
+ BSONObj split2 = BSON( "a" << 18 << "b" << 0 );
+ vector<BSONObj> splitKeys;
+ splitKeys.push_back( split1 );
+ splitKeys.push_back( split2 );
+ ShardChunkVersion version( 1 , 99 ); // first chunk 1|99 , second 1|100
+ ShardChunkManagerPtr cloned( s.cloneSplit( min , max , splitKeys , version ) );
+
+ version.incMinor(); /* second chunk 1|100, first split point */
+ version.incMinor(); /* third chunk 1|101, second split point */
+ ASSERT_EQUALS( cloned->getVersion() , version /* 1|101 */ );
+ ASSERT_EQUALS( s.getNumChunks() , 1u );
+ ASSERT_EQUALS( cloned->getNumChunks() , 3u );
+ ASSERT( cloned->belongsToMe( min ) );
+ ASSERT( cloned->belongsToMe( split1 ) );
+ ASSERT( cloned->belongsToMe( split2 ) );
+ ASSERT( ! cloned->belongsToMe( max ) );
+ }
+ };
+
+ class CloneSplitExceptionTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 << "b" << 1 ) <<
+ "unique" << false );
+ // 1-chunk collection
+ // [10,0-20,0)
+ BSONObj min = BSON( "a" << 10 << "b" << 0 );
+ BSONObj max = BSON( "a" << 20 << "b" << 0 );
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "test.foo-a_MinKey"
+ << "ns" << "test.foo"
+ << "min" << min
+ << "max" << max ) );
+
+ ShardChunkManager s ( collection , chunks );
+
+ BSONObj badSplit = BSON( "a" << 5 << "b" << 0 );
+ vector<BSONObj> splitKeys;
+ splitKeys.push_back( badSplit );
+ ASSERT_EXCEPTION( s.cloneSplit( min , max , splitKeys , ShardChunkVersion( 1 ) ) , UserException );
+
+ BSONObj badMax = BSON( "a" << 25 << "b" << 0 );
+ BSONObj split = BSON( "a" << 15 << "b" << 0 );
+ splitKeys.clear();
+ splitKeys.push_back( split );
+ ASSERT_EXCEPTION( s.cloneSplit( min , badMax, splitKeys , ShardChunkVersion( 1 ) ) , UserException );
+ }
+ };
+
+ class EmptyShardTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 ) <<
+ "unique" << false );
+
+ // no chunks on this shard
+ BSONArray chunks;
+
+ // shard can have zero chunks for an existing collection
+ // version should be 0, though
+ ShardChunkManager s( collection , chunks );
+ ASSERT_EQUALS( s.getVersion() , ShardChunkVersion( 0 ) );
+ ASSERT_EQUALS( s.getNumChunks() , 0u );
+ }
+ };
+
+ class LastChunkTests {
+ public:
+ void run() {
+ BSONObj collection = BSON( "_id" << "test.foo" <<
+ "dropped" << false <<
+ "key" << BSON( "a" << 1 ) <<
+ "unique" << false );
+
+ // 1-chunk collection
+ // [10->20)
+ BSONArray chunks = BSON_ARRAY( BSON( "_id" << "test.foo-a_10" <<
+ "ns" << "test.foo" <<
+ "min" << BSON( "a" << 10 ) <<
+ "max" << BSON( "a" << 20 ) ) );
+
+ ShardChunkManager s( collection , chunks );
+ BSONObj min = BSON( "a" << 10 );
+ BSONObj max = BSON( "a" << 20 );
+
+ // if we remove the only chunk, the only version accepted is 0
+ ShardChunkVersion nonZero = 99;
+ ASSERT_EXCEPTION( s.cloneMinus( min , max , nonZero ) , UserException );
+ ShardChunkManagerPtr empty( s.cloneMinus( min , max , 0 ) );
+ ASSERT_EQUALS( empty->getVersion() , ShardChunkVersion( 0 ) );
+ ASSERT_EQUALS( empty->getNumChunks() , 0u );
+ BSONObj k = BSON( "a" << 15 << "b" << 0 );
+ ASSERT( ! empty->belongsToMe( k ) );
+
+ // we can add a chunk to an empty manager
+ // version should be provided
+ ASSERT_EXCEPTION( empty->clonePlus( min , max , 0 ) , UserException );
+ ShardChunkManagerPtr cloned( empty->clonePlus( min , max , nonZero ) );
+ ASSERT_EQUALS( cloned->getVersion(), nonZero );
+ ASSERT_EQUALS( cloned->getNumChunks() , 1u );
+ ASSERT( cloned->belongsToMe( k ) );
+ }
+ };
+
+ class ShardChunkManagerSuite : public Suite {
+ public:
+ ShardChunkManagerSuite() : Suite ( "shard_chunk_manager" ) {}
+
+ void setupTests() {
+ add< BasicTests >();
+ add< BasicCompoundTests >();
+ add< RangeTests >();
+ add< GetNextTests >();
+ add< DeletedTests >();
+ add< ClonePlusTests >();
+ add< ClonePlusExceptionTests >();
+ add< CloneMinusTests >();
+ add< CloneMinusExceptionTests >();
+ add< CloneSplitTests >();
+ add< CloneSplitExceptionTests >();
+ add< EmptyShardTests >();
+ add< LastChunkTests >();
+ }
+ } shardChunkManagerSuite;
+
+} // anonymous namespace
diff --git a/dbtests/dbtests.cpp b/dbtests/dbtests.cpp
index 195a1d1..8ede08d 100644
--- a/dbtests/dbtests.cpp
+++ b/dbtests/dbtests.cpp
@@ -1,4 +1,4 @@
-// dbtests.cpp : Runs db unit tests.
+// @file dbtests.cpp : Runs db unit tests.
//
/**
@@ -18,11 +18,9 @@
*/
#include "pch.h"
-
#include "dbtests.h"
int main( int argc, char** argv ) {
static StaticObserver StaticObserver;
return Suite::run(argc, argv, "/tmp/unittest");
}
-
diff --git a/dbtests/directclienttests.cpp b/dbtests/directclienttests.cpp
new file mode 100644
index 0000000..204bf92
--- /dev/null
+++ b/dbtests/directclienttests.cpp
@@ -0,0 +1,80 @@
+/** @file directclienttests.cpp
+*/
+
+/**
+ * Copyright (C) 2008 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "../db/query.h"
+#include "../db/db.h"
+#include "../db/instance.h"
+#include "../db/json.h"
+#include "../db/lasterror.h"
+#include "../db/update.h"
+#include "../util/timer.h"
+#include "dbtests.h"
+
+namespace DirectClientTests {
+
+ class ClientBase {
+ public:
+ // NOTE: Not bothering to backup the old error record.
+ ClientBase() { mongo::lastError.reset( new LastError() ); }
+ virtual ~ClientBase() { }
+ protected:
+ static bool error() {
+ return !_client.getPrevError().getField( "err" ).isNull();
+ }
+ DBDirectClient &client() const { return _client; }
+ private:
+ static DBDirectClient _client;
+ };
+ DBDirectClient ClientBase::_client;
+
+ const char *ns = "a.b";
+
+ class Capped : public ClientBase {
+ public:
+ virtual void run() {
+ for( int pass=0; pass < 3; pass++ ) {
+ client().createCollection(ns, 1024 * 1024, true, 999);
+ for( int j =0; j < pass*3; j++ )
+ client().insert(ns, BSON("x" << j));
+
+ // test truncation of a capped collection
+ if( pass ) {
+ BSONObj info;
+ BSONObj cmd = BSON( "captrunc" << "b" << "n" << 1 << "inc" << true );
+ cout << cmd.toString() << endl;
+ bool ok = client().runCommand("a", cmd, info);
+ cout << info.toString() << endl;
+ assert(ok);
+ }
+
+ assert( client().dropCollection(ns) );
+ }
+ }
+ };
+
+ class All : public Suite {
+ public:
+ All() : Suite( "directclient" ) {
+ }
+ void setupTests() {
+ add< Capped >();
+ }
+ } myall;
+}
diff --git a/dbtests/framework.cpp b/dbtests/framework.cpp
index e624211..c92c8d6 100644
--- a/dbtests/framework.cpp
+++ b/dbtests/framework.cpp
@@ -25,6 +25,7 @@
#include "framework.h"
#include "../util/file_allocator.h"
+#include "../db/dur.h"
#ifndef _WIN32
#include <cxxabi.h>
@@ -34,7 +35,7 @@
namespace po = boost::program_options;
namespace mongo {
-
+
CmdLine cmdLine;
namespace regression {
@@ -46,21 +47,21 @@ namespace mongo {
Result( string name ) : _name( name ) , _rc(0) , _tests(0) , _fails(0) , _asserts(0) {
}
- string toString(){
+ string toString() {
stringstream ss;
char result[128];
sprintf(result, "%-20s | tests: %4d | fails: %4d | assert calls: %6d\n", _name.c_str(), _tests, _fails, _asserts);
ss << result;
- for ( list<string>::iterator i=_messages.begin(); i!=_messages.end(); i++ ){
+ for ( list<string>::iterator i=_messages.begin(); i!=_messages.end(); i++ ) {
ss << "\t" << *i << '\n';
}
-
+
return ss.str();
}
- int rc(){
+ int rc() {
return _rc;
}
@@ -77,7 +78,7 @@ namespace mongo {
Result * Result::cur = 0;
- Result * Suite::run( const string& filter ){
+ Result * Suite::run( const string& filter ) {
tlogLevel = -1;
log(1) << "\t about to setupTests" << endl;
@@ -90,53 +91,53 @@ namespace mongo {
/* see note in SavedContext */
//writelock lk("");
- for ( list<TestCase*>::iterator i=_tests.begin(); i!=_tests.end(); i++ ){
+ for ( list<TestCase*>::iterator i=_tests.begin(); i!=_tests.end(); i++ ) {
TestCase * tc = *i;
- if ( filter.size() && tc->getName().find( filter ) == string::npos ){
+ if ( filter.size() && tc->getName().find( filter ) == string::npos ) {
log(1) << "\t skipping test: " << tc->getName() << " because doesn't match filter" << endl;
continue;
}
r->_tests++;
-
+
bool passes = false;
-
+
log(1) << "\t going to run test: " << tc->getName() << endl;
-
+
stringstream err;
err << tc->getName() << "\t";
-
+
try {
tc->run();
passes = true;
}
- catch ( MyAssertionException * ae ){
+ catch ( MyAssertionException * ae ) {
err << ae->ss.str();
delete( ae );
}
- catch ( std::exception& e ){
+ catch ( std::exception& e ) {
err << " exception: " << e.what();
}
- catch ( int x ){
+ catch ( int x ) {
err << " caught int : " << x << endl;
}
- catch ( ... ){
+ catch ( ... ) {
cerr << "unknown exception in test: " << tc->getName() << endl;
}
-
- if ( ! passes ){
+
+ if ( ! passes ) {
string s = err.str();
log() << "FAIL: " << s << endl;
r->_fails++;
r->_messages.push_back( s );
- }
+ }
}
-
+
if ( r->_fails )
r->_rc = 17;
log(1) << "\t DONE running tests" << endl;
-
+
return r;
}
@@ -155,20 +156,23 @@ namespace mongo {
po::positional_options_description positional_options;
shell_options.add_options()
- ("help,h", "show this usage information")
- ("dbpath", po::value<string>(&dbpathSpec)->default_value(default_dbpath),
- "db data path for this test run. NOTE: the contents of this "
- "directory will be overwritten if it already exists")
- ("debug", "run tests with verbose output")
- ("list,l", "list available test suites")
- ("filter,f" , po::value<string>() , "string substring filter on test name" )
- ("verbose,v", "verbose")
- ("seed", po::value<unsigned long long>(&seed), "random number seed")
- ;
-
+ ("help,h", "show this usage information")
+ ("dbpath", po::value<string>(&dbpathSpec)->default_value(default_dbpath),
+ "db data path for this test run. NOTE: the contents of this "
+ "directory will be overwritten if it already exists")
+ ("debug", "run tests with verbose output")
+ ("list,l", "list available test suites")
+ ("bigfiles", "use big datafiles instead of smallfiles which is the default")
+ ("filter,f" , po::value<string>() , "string substring filter on test name" )
+ ("verbose,v", "verbose")
+ ("dur", "enable journaling")
+ ("nodur", "disable journaling (currently the default)")
+ ("seed", po::value<unsigned long long>(&seed), "random number seed")
+ ;
+
hidden_options.add_options()
- ("suites", po::value< vector<string> >(), "test suites to run")
- ;
+ ("suites", po::value< vector<string> >(), "test suites to run")
+ ;
positional_options.add("suites", -1);
@@ -185,7 +189,8 @@ namespace mongo {
positional(positional_options).
style(command_line_style).run(), params);
po::notify(params);
- } catch (po::error &e) {
+ }
+ catch (po::error &e) {
cout << "ERROR: " << e.what() << endl << endl;
show_help_text(argv[0], shell_options);
return EXIT_BADOPTIONS;
@@ -196,6 +201,13 @@ namespace mongo {
return EXIT_CLEAN;
}
+ if( params.count("nodur") ) {
+ cmdLine.dur = false;
+ }
+ if( params.count("dur") || cmdLine.dur ) {
+ cmdLine.dur = true;
+ }
+
if (params.count("debug") || params.count("verbose") ) {
logLevel = 1;
}
@@ -217,18 +229,25 @@ namespace mongo {
}
boost::filesystem::directory_iterator end_iter;
for (boost::filesystem::directory_iterator dir_iter(p);
- dir_iter != end_iter; ++dir_iter) {
+ dir_iter != end_iter; ++dir_iter) {
boost::filesystem::remove_all(*dir_iter);
}
- } else {
+ }
+ else {
boost::filesystem::create_directory(p);
}
string dbpathString = p.native_directory_string();
dbpath = dbpathString.c_str();
-
+
cmdLine.prealloc = false;
+
+ // dbtest defaults to smallfiles
cmdLine.smallfiles = true;
+ if( params.count("bigfiles") ) {
+                cmdLine.smallfiles = false;
+ }
+
cmdLine.oplogSize = 10 * 1024 * 1024;
Client::initThread("testsuite");
acquirePathLock();
@@ -236,32 +255,39 @@ namespace mongo {
srand( (unsigned) seed );
printGitVersion();
printSysInfo();
- out() << "random seed: " << seed << endl;
+ log() << "random seed: " << seed << endl;
- theFileAllocator().start();
+ FileAllocator::get()->start();
vector<string> suites;
if (params.count("suites")) {
suites = params["suites"].as< vector<string> >();
}
-
+
string filter = "";
- if ( params.count( "filter" ) ){
+ if ( params.count( "filter" ) ) {
filter = params["filter"].as<string>();
}
+ dur::startup();
+
+ if( debug && cmdLine.dur ) {
+ cout << "setting cmdLine.durOptions=8" << endl;
+ cmdLine.durOptions = 8;
+ }
+
int ret = run(suites,filter);
#if !defined(_WIN32) && !defined(__sunos__)
flock( lockFile, LOCK_UN );
#endif
-
+
cc().shutdown();
dbexit( (ExitCode)ret ); // so everything shuts down cleanly
return ret;
}
- int Suite::run( vector<string> suites , const string& filter ){
+ int Suite::run( vector<string> suites , const string& filter ) {
for ( unsigned int i = 0; i < suites.size(); i++ ) {
if ( _suites->find( suites[i] ) == _suites->end() ) {
cout << "invalid test [" << suites[i] << "], use --list to see valid names" << endl;
@@ -277,7 +303,7 @@ namespace mongo {
list<Result*> results;
- for ( list<string>::iterator i=torun.begin(); i!=torun.end(); i++ ){
+ for ( list<string>::iterator i=torun.begin(); i!=torun.end(); i++ ) {
string name = *i;
Suite * s = (*_suites)[name];
assert( s );
@@ -298,12 +324,12 @@ namespace mongo {
int fails = 0;
int asserts = 0;
- for ( list<Result*>::iterator i=results.begin(); i!=results.end(); i++ ){
+ for ( list<Result*>::iterator i=results.begin(); i!=results.end(); i++ ) {
Result * r = *i;
cout << r->toString();
if ( abs( r->rc() ) > abs( rc ) )
rc = r->rc();
-
+
tests += r->_tests;
fails += r->_fails;
asserts += r->_asserts;
@@ -313,13 +339,13 @@ namespace mongo {
totals._tests = tests;
totals._fails = fails;
totals._asserts = asserts;
-
+
cout << totals.toString(); // includes endl
return rc;
}
- void Suite::registerSuite( string name , Suite * s ){
+ void Suite::registerSuite( string name , Suite * s ) {
if ( ! _suites )
_suites = new map<string,Suite*>();
Suite*& m = (*_suites)[name];
@@ -327,37 +353,37 @@ namespace mongo {
m = s;
}
- void assert_pass(){
+ void assert_pass() {
Result::cur->_asserts++;
}
- void assert_fail( const char * exp , const char * file , unsigned line ){
+ void assert_fail( const char * exp , const char * file , unsigned line ) {
Result::cur->_asserts++;
-
+
MyAssertionException * e = new MyAssertionException();
e->ss << "ASSERT FAILED! " << file << ":" << line << endl;
throw e;
}
- void fail( const char * exp , const char * file , unsigned line ){
+ void fail( const char * exp , const char * file , unsigned line ) {
assert(0);
}
- MyAssertionException * MyAsserts::getBase(){
+ MyAssertionException * MyAsserts::getBase() {
MyAssertionException * e = new MyAssertionException();
e->ss << _file << ":" << _line << " " << _aexp << " != " << _bexp << " ";
return e;
}
-
- void MyAsserts::printLocation(){
+
+ void MyAsserts::printLocation() {
log() << _file << ":" << _line << " " << _aexp << " != " << _bexp << " ";
}
- void MyAsserts::_gotAssert(){
+ void MyAsserts::_gotAssert() {
Result::cur->_asserts++;
}
}
- void setupSignals(){}
+ void setupSignals( bool inFork ) {}
}
diff --git a/dbtests/framework.h b/dbtests/framework.h
index bec14a2..29ba58b 100644
--- a/dbtests/framework.h
+++ b/dbtests/framework.h
@@ -49,7 +49,7 @@ namespace mongo {
class TestCase {
public:
- virtual ~TestCase(){}
+ virtual ~TestCase() {}
virtual void run() = 0;
virtual string getName() = 0;
};
@@ -57,15 +57,15 @@ namespace mongo {
template< class T >
class TestHolderBase : public TestCase {
public:
- TestHolderBase(){}
- virtual ~TestHolderBase(){}
- virtual void run(){
+ TestHolderBase() {}
+ virtual ~TestHolderBase() {}
+ virtual void run() {
auto_ptr<T> t;
t.reset( create() );
t->run();
}
virtual T * create() = 0;
- virtual string getName(){
+ virtual string getName() {
return demangleName( typeid(T) );
}
};
@@ -73,7 +73,7 @@ namespace mongo {
template< class T >
class TestHolder0 : public TestHolderBase<T> {
public:
- virtual T * create(){
+ virtual T * create() {
return new T();
}
};
@@ -81,8 +81,8 @@ namespace mongo {
template< class T , typename A >
class TestHolder1 : public TestHolderBase<T> {
public:
- TestHolder1( const A& a ) : _a(a){}
- virtual T * create(){
+ TestHolder1( const A& a ) : _a(a) {}
+ virtual T * create() {
return new T( _a );
}
const A& _a;
@@ -90,25 +90,25 @@ namespace mongo {
class Suite {
public:
- Suite( string name ) : _name( name ){
+ Suite( string name ) : _name( name ) {
registerSuite( name , this );
_ran = 0;
}
virtual ~Suite() {
- if ( _ran ){
+ if ( _ran ) {
DBDirectClient c;
c.dropDatabase( "unittests" );
}
}
template<class T>
- void add(){
+ void add() {
_tests.push_back( new TestHolder0<T>() );
}
template<class T , typename A >
- void add( const A& a ){
+ void add( const A& a ) {
_tests.push_back( new TestHolder1<T,A>(a) );
}
@@ -137,7 +137,7 @@ namespace mongo {
class MyAssertionException : boost::noncopyable {
public:
- MyAssertionException(){
+ MyAssertionException() {
ss << "assertion: ";
}
stringstream ss;
@@ -148,32 +148,32 @@ namespace mongo {
class MyAsserts {
public:
MyAsserts( const char * aexp , const char * bexp , const char * file , unsigned line )
- : _aexp( aexp ) , _bexp( bexp ) , _file( file ) , _line( line ){
+ : _aexp( aexp ) , _bexp( bexp ) , _file( file ) , _line( line ) {
}
-
+
template<typename A,typename B>
- void ae( A a , B b ){
+ void ae( A a , B b ) {
_gotAssert();
if ( a == b )
return;
-
+
printLocation();
-
+
MyAssertionException * e = getBase();
e->ss << a << " != " << b << endl;
log() << e->ss.str() << endl;
throw e;
}
-
+
template<typename A,typename B>
- void nae( A a , B b ){
+ void nae( A a , B b ) {
_gotAssert();
if ( a != b )
return;
-
+
printLocation();
-
+
MyAssertionException * e = getBase();
e->ss << a << " == " << b << endl;
log() << e->ss.str() << endl;
@@ -182,13 +182,13 @@ namespace mongo {
void printLocation();
-
+
private:
-
+
void _gotAssert();
-
+
MyAssertionException * getBase();
-
+
string _aexp;
string _bexp;
string _file;
diff --git a/dbtests/histogram_test.cpp b/dbtests/histogram_test.cpp
index 5a8970d..e9cbb5b 100644
--- a/dbtests/histogram_test.cpp
+++ b/dbtests/histogram_test.cpp
@@ -25,9 +25,9 @@ namespace mongo {
using mongo::Histogram;
- class BoundariesInit{
+ class BoundariesInit {
public:
- void run(){
+ void run() {
Histogram::Options opts;
opts.numBuckets = 3;
opts.bucketSize = 10;
@@ -45,9 +45,9 @@ namespace mongo {
}
};
- class BoundariesExponential{
+ class BoundariesExponential {
public:
- void run(){
+ void run() {
Histogram::Options opts;
opts.numBuckets = 4;
opts.bucketSize = 125;
@@ -57,13 +57,13 @@ namespace mongo {
ASSERT_EQUALS( h.getBoundary( 0 ), 125u );
ASSERT_EQUALS( h.getBoundary( 1 ), 250u );
ASSERT_EQUALS( h.getBoundary( 2 ), 500u );
- ASSERT_EQUALS( h.getBoundary( 3 ), numeric_limits<uint32_t>::max() );
+ ASSERT_EQUALS( h.getBoundary( 3 ), numeric_limits<uint32_t>::max() );
}
};
- class BoundariesFind{
+ class BoundariesFind {
public:
- void run(){
+ void run() {
Histogram::Options opts;
opts.numBuckets = 3;
opts.bucketSize = 10;
@@ -81,14 +81,14 @@ namespace mongo {
class HistogramSuite : public Suite {
public:
- HistogramSuite() : Suite( "histogram" ){}
+ HistogramSuite() : Suite( "histogram" ) {}
- void setupTests(){
+ void setupTests() {
add< BoundariesInit >();
add< BoundariesExponential >();
add< BoundariesFind >();
// TODO: complete the test suite
- }
+ }
} histogramSuite;
} // anonymous namespace
diff --git a/dbtests/jsobjtests.cpp b/dbtests/jsobjtests.cpp
index ea7606f..6804d71 100644
--- a/dbtests/jsobjtests.cpp
+++ b/dbtests/jsobjtests.cpp
@@ -150,7 +150,7 @@ namespace JsobjTests {
class MultiKeySortOrder : public Base {
public:
- void run(){
+ void run() {
ASSERT( BSON( "x" << "a" ).woCompare( BSON( "x" << "b" ) ) < 0 );
ASSERT( BSON( "x" << "b" ).woCompare( BSON( "x" << "a" ) ) > 0 );
@@ -255,9 +255,9 @@ namespace JsobjTests {
}
};
- class AsTempObj{
+ class AsTempObj {
public:
- void run(){
+ void run() {
{
BSONObjBuilder bb;
bb << "a" << 1;
@@ -267,7 +267,7 @@ namespace JsobjTests {
ASSERT(tmp.hasField("a"));
ASSERT(!tmp.hasField("b"));
ASSERT(tmp == BSON("a" << 1));
-
+
bb << "b" << 2;
BSONObj obj = bb.obj();
ASSERT_EQUALS(obj.objsize() , 4+(1+2+4)+(1+2+4)+1);
@@ -285,7 +285,7 @@ namespace JsobjTests {
ASSERT(tmp.hasField("a"));
ASSERT(!tmp.hasField("b"));
ASSERT(tmp == BSON("a" << BSON("$gt" << 1)));
-
+
bb << "b" << LT << 2;
BSONObj obj = bb.obj();
ASSERT(obj.objsize() == 4+(1+2+(4+1+4+4+1))+(1+2+(4+1+4+4+1))+1);
@@ -293,7 +293,7 @@ namespace JsobjTests {
ASSERT(obj.hasField("a"));
ASSERT(obj.hasField("b"));
ASSERT(obj == BSON("a" << BSON("$gt" << 1)
- << "b" << BSON("$lt" << 2)));
+ << "b" << BSON("$lt" << 2)));
}
{
BSONObjBuilder bb(32);
@@ -304,10 +304,10 @@ namespace JsobjTests {
ASSERT(tmp.hasField("a"));
ASSERT(!tmp.hasField("b"));
ASSERT(tmp == BSON("a" << 1));
-
+
//force a realloc
BSONArrayBuilder arr;
- for (int i=0; i < 10000; i++){
+ for (int i=0; i < 10000; i++) {
arr << i;
}
bb << "b" << arr.arr();
@@ -319,8 +319,8 @@ namespace JsobjTests {
}
};
- struct AppendIntOrLL{
- void run(){
+ struct AppendIntOrLL {
+ void run() {
const long long billion = 1000*1000*1000;
BSONObjBuilder b;
b.appendIntOrLL("i1", 1);
@@ -362,16 +362,16 @@ namespace JsobjTests {
};
struct AppendNumber {
- void run(){
+ void run() {
BSONObjBuilder b;
b.appendNumber( "a" , 5 );
b.appendNumber( "b" , 5.5 );
b.appendNumber( "c" , (1024LL*1024*1024)-1 );
b.appendNumber( "d" , (1024LL*1024*1024*1024)-1 );
b.appendNumber( "e" , 1024LL*1024*1024*1024*1024*1024 );
-
+
BSONObj o = b.obj();
-
+
ASSERT( o["a"].type() == NumberInt );
ASSERT( o["b"].type() == NumberDouble );
ASSERT( o["c"].type() == NumberInt );
@@ -380,7 +380,7 @@ namespace JsobjTests {
}
};
-
+
class ToStringArray {
public:
void run() {
@@ -391,28 +391,28 @@ namespace JsobjTests {
class ToStringNumber {
public:
-
- void run(){
+
+ void run() {
BSONObjBuilder b;
b.append( "a" , (int)4 );
b.append( "b" , (double)5 );
b.append( "c" , (long long)6 );
-
+
b.append( "d" , 123.456789123456789123456789123456789 );
b.append( "e" , 123456789.123456789123456789123456789 );
b.append( "f" , 1234567891234567891234.56789123456789 );
b.append( "g" , -123.456 );
-
+
BSONObj x = b.obj();
ASSERT_EQUALS( "4", x["a"].toString( false , true ) );
ASSERT_EQUALS( "5.0", x["b"].toString( false , true ) );
- ASSERT_EQUALS( "6", x["c"].toString( false , true ) );
+ ASSERT_EQUALS( "6", x["c"].toString( false , true ) );
ASSERT_EQUALS( "123.4567891234568" , x["d"].toString( false , true ) );
ASSERT_EQUALS( "123456789.1234568" , x["e"].toString( false , true ) );
// ASSERT_EQUALS( "1.234567891234568e+21" , x["f"].toString( false , true ) ); // windows and *nix are different - TODO, work around for test or not bother?
-
+
ASSERT_EQUALS( "-123.456" , x["g"].toString( false , true ) );
}
@@ -442,6 +442,46 @@ namespace JsobjTests {
};
+ class AppendAs {
+ public:
+ void run() {
+ BSONObjBuilder b;
+ {
+ BSONObj foo = BSON( "foo" << 1 );
+ b.appendAs( foo.firstElement(), "bar" );
+ }
+ ASSERT_EQUALS( BSON( "bar" << 1 ), b.done() );
+ }
+ };
+
+ class ArrayAppendAs {
+ public:
+ void run() {
+ BSONArrayBuilder b;
+ {
+ BSONObj foo = BSON( "foo" << 1 );
+ b.appendAs( foo.firstElement(), "3" );
+ }
+ BSONArray a = b.arr();
+ BSONObj expected = BSON( "3" << 1 );
+ ASSERT_EQUALS( expected.firstElement(), a[ 3 ] );
+ ASSERT_EQUALS( 4, a.nFields() );
+ }
+ };
+
+ class GetField {
+ public:
+ void run(){
+ BSONObj o = BSON( "a" << 1 <<
+ "b" << BSON( "a" << 2 ) <<
+ "c" << BSON_ARRAY( BSON( "a" << 3 ) << BSON( "a" << 4 ) ) );
+ ASSERT_EQUALS( 1 , o.getFieldDotted( "a" ).numberInt() );
+ ASSERT_EQUALS( 2 , o.getFieldDotted( "b.a" ).numberInt() );
+ ASSERT_EQUALS( 3 , o.getFieldDotted( "c.0.a" ).numberInt() );
+ ASSERT_EQUALS( 4 , o.getFieldDotted( "c.1.a" ).numberInt() );
+ }
+ };
+
namespace Validation {
class Base {
@@ -691,12 +731,12 @@ namespace JsobjTests {
a.valid();
BSONObj b = fromjson( "{\"one\":2, \"two\":5, \"three\": {},"
- "\"four\": { \"five\": { \"six\" : 11 } },"
- "\"seven\": [ \"a\", \"bb\", \"ccc\", 5 ],"
- "\"eight\": Dbref( \"rrr\", \"01234567890123456789aaaa\" ),"
- "\"_id\": ObjectId( \"deadbeefdeadbeefdeadbeef\" ),"
- "\"nine\": { \"$binary\": \"abc=\", \"$type\": \"00\" },"
- "\"ten\": Date( 44 ), \"eleven\": /foooooo/i }" );
+ "\"four\": { \"five\": { \"six\" : 11 } },"
+ "\"seven\": [ \"a\", \"bb\", \"ccc\", 5 ],"
+ "\"eight\": Dbref( \"rrr\", \"01234567890123456789aaaa\" ),"
+ "\"_id\": ObjectId( \"deadbeefdeadbeefdeadbeef\" ),"
+ "\"nine\": { \"$binary\": \"abc=\", \"$type\": \"00\" },"
+ "\"ten\": Date( 44 ), \"eleven\": /foooooo/i }" );
fuzz( b );
b.valid();
}
@@ -723,7 +763,7 @@ namespace JsobjTests {
class init1 {
public:
- void run(){
+ void run() {
OID a;
OID b;
@@ -736,7 +776,7 @@ namespace JsobjTests {
class initParse1 {
public:
- void run(){
+ void run() {
OID a;
OID b;
@@ -750,7 +790,7 @@ namespace JsobjTests {
class append {
public:
- void run(){
+ void run() {
BSONObjBuilder b;
b.appendOID( "a" , 0 );
b.appendOID( "b" , 0 , false );
@@ -766,18 +806,18 @@ namespace JsobjTests {
class increasing {
public:
- BSONObj g(){
+ BSONObj g() {
BSONObjBuilder b;
b.appendOID( "_id" , 0 , true );
return b.obj();
}
- void run(){
+ void run() {
BSONObj a = g();
BSONObj b = g();
-
+
ASSERT( a.woCompare( b ) < 0 );
-
- // yes, there is a 1/1000 chance this won't increase time(0)
+
+ // yes, there is a 1/1000 chance this won't increase time(0)
// and therefore inaccurately say the function is behaving
// buf if its broken, it will fail 999/1000, so i think that's good enough
sleepsecs( 1 );
@@ -788,7 +828,7 @@ namespace JsobjTests {
class ToDate {
public:
- void run(){
+ void run() {
OID oid;
{
@@ -812,7 +852,7 @@ namespace JsobjTests {
class FromDate {
public:
- void run(){
+ void run() {
OID min, oid, max;
Date_t now = jsTime();
oid.init(); // slight chance this has different time. If its a problem, can change.
@@ -890,26 +930,26 @@ namespace JsobjTests {
class LabelMulti : public LabelBase {
BSONObj expected() {
return BSON( "z" << "q"
- << "a" << BSON( "$gt" << 1 << "$lte" << "x" )
- << "b" << BSON( "$ne" << 1 << "$ne" << "f" << "$ne" << 22.3 )
- << "x" << "p" );
+ << "a" << BSON( "$gt" << 1 << "$lte" << "x" )
+ << "b" << BSON( "$ne" << 1 << "$ne" << "f" << "$ne" << 22.3 )
+ << "x" << "p" );
}
BSONObj actual() {
return BSON( "z" << "q"
- << "a" << GT << 1 << LTE << "x"
- << "b" << NE << 1 << NE << "f" << NE << 22.3
- << "x" << "p" );
+ << "a" << GT << 1 << LTE << "x"
+ << "b" << NE << 1 << NE << "f" << NE << 22.3
+ << "x" << "p" );
}
};
class LabelishOr : public LabelBase {
BSONObj expected() {
return BSON( "$or" << BSON_ARRAY(
- BSON("a" << BSON( "$gt" << 1 << "$lte" << "x" ))
- << BSON("b" << BSON( "$ne" << 1 << "$ne" << "f" << "$ne" << 22.3 ))
- << BSON("x" << "p" )));
+ BSON("a" << BSON( "$gt" << 1 << "$lte" << "x" ))
+ << BSON("b" << BSON( "$ne" << 1 << "$ne" << "f" << "$ne" << 22.3 ))
+ << BSON("x" << "p" )));
}
BSONObj actual() {
- return OR( BSON( "a" << GT << 1 << LTE << "x"),
+ return OR( BSON( "a" << GT << 1 << LTE << "x"),
BSON( "b" << NE << 1 << NE << "f" << NE << 22.3),
BSON( "x" << "p" ) );
}
@@ -925,7 +965,7 @@ namespace JsobjTests {
class ElementAppend {
public:
- void run(){
+ void run() {
BSONObj a = BSON( "a" << 17 );
BSONObj b = BSON( "b" << a["a"] );
ASSERT_EQUALS( NumberInt , a["a"].type() );
@@ -998,23 +1038,39 @@ namespace JsobjTests {
}
};
+ class MinMaxKeyBuilder {
+ public:
+ void run() {
+ BSONObj min = BSON( "a" << MINKEY );
+ BSONObj max = BSON( "b" << MAXKEY );
+
+ ASSERT( min.valid() );
+ ASSERT( max.valid() );
+
+ BSONElement minElement = min["a"];
+ BSONElement maxElement = max["b"];
+ ASSERT( minElement.type() == MinKey );
+ ASSERT( maxElement.type() == MaxKey );
+ }
+ };
+
class MinMaxElementTest {
public:
- BSONObj min( int t ){
+ BSONObj min( int t ) {
BSONObjBuilder b;
b.appendMinForType( "a" , t );
return b.obj();
}
- BSONObj max( int t ){
+ BSONObj max( int t ) {
BSONObjBuilder b;
b.appendMaxForType( "a" , t );
return b.obj();
}
- void run(){
- for ( int t=1; t<JSTypeMax; t++ ){
+ void run() {
+ for ( int t=1; t<JSTypeMax; t++ ) {
stringstream ss;
ss << "type: " << t;
string s = ss.str();
@@ -1025,14 +1081,11 @@ namespace JsobjTests {
massert( 10407 , s , abs( min( t ).firstElement().canonicalType() - max( t ).firstElement().canonicalType() ) <= 10 );
}
}
-
-
-
};
class ExtractFieldsTest {
public:
- void run(){
+ void run() {
BSONObj x = BSON( "a" << 10 << "b" << 11 );
assert( BSON( "a" << 10 ).woCompare( x.extractFields( BSON( "a" << 1 ) ) ) == 0 );
assert( BSON( "b" << 11 ).woCompare( x.extractFields( BSON( "b" << 1 ) ) ) == 0 );
@@ -1044,10 +1097,10 @@ namespace JsobjTests {
class ComparatorTest {
public:
- BSONObj one( string s ){
+ BSONObj one( string s ) {
return BSON( "x" << s );
}
- BSONObj two( string x , string y ){
+ BSONObj two( string x , string y ) {
BSONObjBuilder b;
b.append( "x" , x );
if ( y.size() )
@@ -1057,7 +1110,7 @@ namespace JsobjTests {
return b.obj();
}
- void test( BSONObj order , BSONObj l , BSONObj r , bool wanted ){
+ void test( BSONObj order , BSONObj l , BSONObj r , bool wanted ) {
BSONObjCmp c( order );
bool got = c(l,r);
if ( got == wanted )
@@ -1065,11 +1118,11 @@ namespace JsobjTests {
cout << " order: " << order << " l: " << l << "r: " << r << " wanted: " << wanted << " got: " << got << endl;
}
- void lt( BSONObj order , BSONObj l , BSONObj r ){
+ void lt( BSONObj order , BSONObj l , BSONObj r ) {
test( order , l , r , 1 );
}
- void run(){
+ void run() {
BSONObj s = BSON( "x" << 1 );
BSONObj c = BSON( "x" << 1 << "y" << 1 );
test( s , one( "A" ) , one( "B" ) , 1 );
@@ -1093,7 +1146,7 @@ namespace JsobjTests {
namespace external_sort {
class Basic1 {
public:
- void run(){
+ void run() {
BSONObjExternalSorter sorter;
sorter.add( BSON( "x" << 10 ) , 5 , 1);
sorter.add( BSON( "x" << 2 ) , 3 , 1 );
@@ -1101,14 +1154,14 @@ namespace JsobjTests {
sorter.add( BSON( "x" << 5 ) , 7 , 1 );
sorter.sort();
-
+
auto_ptr<BSONObjExternalSorter::Iterator> i = sorter.iterator();
int num=0;
- while ( i->more() ){
+ while ( i->more() ) {
pair<BSONObj,DiskLoc> p = i->next();
if ( num == 0 )
assert( p.first["x"].number() == 2 );
- else if ( num <= 2 ){
+ else if ( num <= 2 ) {
assert( p.first["x"].number() == 5 );
}
else if ( num == 3 )
@@ -1117,15 +1170,15 @@ namespace JsobjTests {
ASSERT( 0 );
num++;
}
-
-
+
+
ASSERT_EQUALS( 0 , sorter.numFiles() );
}
};
class Basic2 {
public:
- void run(){
+ void run() {
BSONObjExternalSorter sorter( BSONObj() , 10 );
sorter.add( BSON( "x" << 10 ) , 5 , 11 );
sorter.add( BSON( "x" << 2 ) , 3 , 1 );
@@ -1133,18 +1186,18 @@ namespace JsobjTests {
sorter.add( BSON( "x" << 5 ) , 7 , 1 );
sorter.sort();
-
+
auto_ptr<BSONObjExternalSorter::Iterator> i = sorter.iterator();
int num=0;
- while ( i->more() ){
+ while ( i->more() ) {
pair<BSONObj,DiskLoc> p = i->next();
- if ( num == 0 ){
+ if ( num == 0 ) {
assert( p.first["x"].number() == 2 );
ASSERT_EQUALS( p.second.toString() , "3:1" );
}
else if ( num <= 2 )
assert( p.first["x"].number() == 5 );
- else if ( num == 3 ){
+ else if ( num == 3 ) {
assert( p.first["x"].number() == 10 );
ASSERT_EQUALS( p.second.toString() , "5:b" );
}
@@ -1158,7 +1211,7 @@ namespace JsobjTests {
class Basic3 {
public:
- void run(){
+ void run() {
BSONObjExternalSorter sorter( BSONObj() , 10 );
sorter.sort();
@@ -1171,23 +1224,23 @@ namespace JsobjTests {
class ByDiskLock {
public:
- void run(){
+ void run() {
BSONObjExternalSorter sorter;
sorter.add( BSON( "x" << 10 ) , 5 , 4);
sorter.add( BSON( "x" << 2 ) , 3 , 0 );
sorter.add( BSON( "x" << 5 ) , 6 , 2 );
sorter.add( BSON( "x" << 5 ) , 7 , 3 );
sorter.add( BSON( "x" << 5 ) , 2 , 1 );
-
+
sorter.sort();
auto_ptr<BSONObjExternalSorter::Iterator> i = sorter.iterator();
int num=0;
- while ( i->more() ){
+ while ( i->more() ) {
pair<BSONObj,DiskLoc> p = i->next();
if ( num == 0 )
assert( p.first["x"].number() == 2 );
- else if ( num <= 3 ){
+ else if ( num <= 3 ) {
assert( p.first["x"].number() == 5 );
}
else if ( num == 4 )
@@ -1205,9 +1258,9 @@ namespace JsobjTests {
class Big1 {
public:
- void run(){
+ void run() {
BSONObjExternalSorter sorter( BSONObj() , 2000 );
- for ( int i=0; i<10000; i++ ){
+ for ( int i=0; i<10000; i++ ) {
sorter.add( BSON( "x" << rand() % 10000 ) , 5 , i );
}
@@ -1216,7 +1269,7 @@ namespace JsobjTests {
auto_ptr<BSONObjExternalSorter::Iterator> i = sorter.iterator();
int num=0;
double prev = 0;
- while ( i->more() ){
+ while ( i->more() ) {
pair<BSONObj,DiskLoc> p = i->next();
num++;
double cur = p.first["x"].number();
@@ -1226,22 +1279,22 @@ namespace JsobjTests {
assert( num == 10000 );
}
};
-
+
class Big2 {
public:
- void run(){
+ void run() {
const int total = 100000;
BSONObjExternalSorter sorter( BSONObj() , total * 2 );
- for ( int i=0; i<total; i++ ){
+ for ( int i=0; i<total; i++ ) {
sorter.add( BSON( "a" << "b" ) , 5 , i );
}
sorter.sort();
-
+
auto_ptr<BSONObjExternalSorter::Iterator> i = sorter.iterator();
int num=0;
double prev = 0;
- while ( i->more() ){
+ while ( i->more() ) {
pair<BSONObj,DiskLoc> p = i->next();
num++;
double cur = p.first["x"].number();
@@ -1255,21 +1308,21 @@ namespace JsobjTests {
class D1 {
public:
- void run(){
-
+ void run() {
+
BSONObjBuilder b;
b.appendNull("");
BSONObj x = b.obj();
-
+
BSONObjExternalSorter sorter;
sorter.add(x, DiskLoc(3,7));
sorter.add(x, DiskLoc(4,7));
sorter.add(x, DiskLoc(2,7));
sorter.add(x, DiskLoc(1,7));
sorter.add(x, DiskLoc(3,77));
-
+
sorter.sort();
-
+
auto_ptr<BSONObjExternalSorter::Iterator> i = sorter.iterator();
while( i->more() ) {
BSONObjExternalSorter::Data d = i->next();
@@ -1280,14 +1333,14 @@ namespace JsobjTests {
}
};
}
-
+
class CompatBSON {
public:
-
+
#define JSONBSONTEST(j,s,m) ASSERT_EQUALS( fromjson( j ).objsize() , s ); ASSERT_EQUALS( fromjson( j ).md5() , m );
#define RAWBSONTEST(j,s,m) ASSERT_EQUALS( j.objsize() , s ); ASSERT_EQUALS( j.md5() , m );
- void run(){
+ void run() {
JSONBSONTEST( "{ 'x' : true }" , 9 , "6fe24623e4efc5cf07f027f9c66b5456" );
JSONBSONTEST( "{ 'x' : null }" , 8 , "12d43430ff6729af501faf0638e68888" );
@@ -1297,20 +1350,20 @@ namespace JsobjTests {
JSONBSONTEST( "{ 'a' : { 'b' : 1.1 } }" , 24 , "31887a4b9d55cd9f17752d6a8a45d51f" );
JSONBSONTEST( "{ 'x' : 5.2 , 'y' : { 'a' : 'eliot' , b : true } , 'z' : null }" , 44 , "b3de8a0739ab329e7aea138d87235205" );
JSONBSONTEST( "{ 'x' : 5.2 , 'y' : [ 'a' , 'eliot' , 'b' , true ] , 'z' : null }" , 62 , "cb7bad5697714ba0cbf51d113b6a0ee8" );
-
+
RAWBSONTEST( BSON( "x" << 4 ) , 12 , "d1ed8dbf79b78fa215e2ded74548d89d" );
-
+
}
};
-
+
class CompareDottedFieldNamesTest {
public:
- void t( FieldCompareResult res , const string& l , const string& r ){
+ void t( FieldCompareResult res , const string& l , const string& r ) {
ASSERT_EQUALS( res , compareDottedFieldNames( l , r ) );
ASSERT_EQUALS( -1 * res , compareDottedFieldNames( r , l ) );
}
-
- void run(){
+
+ void run() {
t( SAME , "x" , "x" );
t( SAME , "x.a" , "x.a" );
t( LEFT_BEFORE , "a" , "b" );
@@ -1320,13 +1373,13 @@ namespace JsobjTests {
}
};
- struct NestedDottedConversions{
- void t(const BSONObj& nest, const BSONObj& dot){
+ struct NestedDottedConversions {
+ void t(const BSONObj& nest, const BSONObj& dot) {
ASSERT_EQUALS( nested2dotted(nest), dot);
ASSERT_EQUALS( nest, dotted2nested(dot));
}
- void run(){
+ void run() {
t( BSON("a" << BSON("b" << 1)), BSON("a.b" << 1) );
t( BSON("a" << BSON("b" << 1 << "c" << 1)), BSON("a.b" << 1 << "a.c" << 1) );
t( BSON("a" << BSON("b" << 1 << "c" << 1) << "d" << 1), BSON("a.b" << 1 << "a.c" << 1 << "d" << 1) );
@@ -1334,8 +1387,8 @@ namespace JsobjTests {
}
};
- struct BSONArrayBuilderTest{
- void run(){
+ struct BSONArrayBuilderTest {
+ void run() {
int i = 0;
BSONObjBuilder objb;
BSONArrayBuilder arrb;
@@ -1374,13 +1427,13 @@ namespace JsobjTests {
ASSERT_EQUALS(o["arr2"].type(), Array);
}
};
-
- struct ArrayMacroTest{
- void run(){
+
+ struct ArrayMacroTest {
+ void run() {
BSONArray arr = BSON_ARRAY( "hello" << 1 << BSON( "foo" << BSON_ARRAY( "bar" << "baz" << "qux" ) ) );
BSONObj obj = BSON( "0" << "hello"
- << "1" << 1
- << "2" << BSON( "foo" << BSON_ARRAY( "bar" << "baz" << "qux" ) ) );
+ << "1" << 1
+ << "2" << BSON( "foo" << BSON_ARRAY( "bar" << "baz" << "qux" ) ) );
ASSERT_EQUALS(arr, obj);
ASSERT_EQUALS(arr["2"].type(), Object);
@@ -1390,25 +1443,25 @@ namespace JsobjTests {
class NumberParsing {
public:
- void run(){
+ void run() {
BSONObjBuilder a;
BSONObjBuilder b;
a.append( "a" , (int)1 );
ASSERT( b.appendAsNumber( "a" , "1" ) );
-
+
a.append( "b" , 1.1 );
ASSERT( b.appendAsNumber( "b" , "1.1" ) );
a.append( "c" , (int)-1 );
ASSERT( b.appendAsNumber( "c" , "-1" ) );
-
+
a.append( "d" , -1.1 );
ASSERT( b.appendAsNumber( "d" , "-1.1" ) );
a.append( "e" , (long long)32131231231232313LL );
ASSERT( b.appendAsNumber( "e" , "32131231231232313" ) );
-
+
ASSERT( ! b.appendAsNumber( "f" , "zz" ) );
ASSERT( ! b.appendAsNumber( "f" , "5zz" ) );
ASSERT( ! b.appendAsNumber( "f" , "zz5" ) );
@@ -1416,10 +1469,10 @@ namespace JsobjTests {
ASSERT_EQUALS( a.obj() , b.obj() );
}
};
-
+
class bson2settest {
public:
- void run(){
+ void run() {
BSONObj o = BSON( "z" << 1 << "a" << 2 << "m" << 3 << "c" << 4 );
BSONObjIteratorSorted i( o );
stringstream ss;
@@ -1429,7 +1482,7 @@ namespace JsobjTests {
{
Timer t;
- for ( int i=0; i<10000; i++ ){
+ for ( int i=0; i<10000; i++ ) {
BSONObjIteratorSorted j( o );
int l = 0;
while ( j.more() )
@@ -1444,22 +1497,22 @@ namespace JsobjTests {
class checkForStorageTests {
public:
-
- void good( string s ){
+
+ void good( string s ) {
BSONObj o = fromjson( s );
if ( o.okForStorage() )
return;
throw UserException( 12528 , (string)"should be ok for storage:" + s );
}
- void bad( string s ){
+ void bad( string s ) {
BSONObj o = fromjson( s );
if ( ! o.okForStorage() )
return;
throw UserException( 12529 , (string)"should NOT be ok for storage:" + s );
}
- void run(){
+ void run() {
good( "{x:1}" );
bad( "{'x.y':1}" );
@@ -1470,7 +1523,7 @@ namespace JsobjTests {
class InvalidIDFind {
public:
- void run(){
+ void run() {
BSONObj x = BSON( "_id" << 5 << "t" << 2 );
{
char * crap = (char*)malloc( x.objsize() );
@@ -1479,7 +1532,7 @@ namespace JsobjTests {
ASSERT_EQUALS( x , y );
free( crap );
}
-
+
{
char * crap = (char*)malloc( x.objsize() );
memcpy( crap , x.objdata() , x.objsize() );
@@ -1490,21 +1543,21 @@ namespace JsobjTests {
BSONObj y( crap , false );
state = 1;
}
- catch ( std::exception& e ){
+ catch ( std::exception& e ) {
state = 2;
ASSERT( strstr( e.what() , "_id: 5" ) > 0 );
}
free( crap );
ASSERT_EQUALS( 2 , state );
}
-
-
+
+
}
};
class ElementSetTest {
public:
- void run(){
+ void run() {
BSONObj x = BSON( "a" << 1 << "b" << 1 << "c" << 2 );
BSONElement a = x["a"];
BSONElement b = x["b"];
@@ -1512,7 +1565,7 @@ namespace JsobjTests {
cout << "c: " << c << endl;
ASSERT( a.woCompare( b ) != 0 );
ASSERT( a.woCompare( b , false ) == 0 );
-
+
BSONElementSet s;
s.insert( a );
ASSERT_EQUALS( 1U , s.size() );
@@ -1523,8 +1576,8 @@ namespace JsobjTests {
ASSERT( s.find( a ) != s.end() );
ASSERT( s.find( b ) != s.end() );
ASSERT( s.find( c ) == s.end() );
-
-
+
+
s.insert( c );
ASSERT_EQUALS( 2U , s.size() );
@@ -1536,12 +1589,22 @@ namespace JsobjTests {
ASSERT( s.count( a ) );
ASSERT( s.count( b ) );
ASSERT( s.count( c ) );
+
+ {
+ BSONElementSet x;
+ BSONObj o = fromjson( "{ 'a' : [ 1 , 2 , 1 ] }" );
+ BSONObjIterator i( o["a"].embeddedObjectUserCheck() );
+ while ( i.more() ) {
+ x.insert( i.next() );
+ }
+ ASSERT_EQUALS( 2U , x.size() );
+ }
}
};
class EmbeddedNumbers {
public:
- void run(){
+ void run() {
BSONObj x = BSON( "a" << BSON( "b" << 1 ) );
BSONObj y = BSON( "a" << BSON( "b" << 1.0 ) );
ASSERT_EQUALS( x , y );
@@ -1551,12 +1614,12 @@ namespace JsobjTests {
class BuilderPartialItearte {
public:
- void run(){
+ void run() {
{
BSONObjBuilder b;
b.append( "x" , 1 );
b.append( "y" , 2 );
-
+
BSONObjIterator i = b.iterator();
ASSERT( i.more() );
ASSERT_EQUALS( 1 , i.next().numberInt() );
@@ -1577,13 +1640,13 @@ namespace JsobjTests {
ASSERT_EQUALS( BSON( "x" << 1 << "y" << 2 << "z" << 3 ) , b.obj() );
}
-
+
}
};
class BSONFieldTests {
public:
- void run(){
+ void run() {
{
BSONField<int> x("x");
BSONObj o = BSON( x << 5 );
@@ -1610,11 +1673,11 @@ namespace JsobjTests {
class BSONForEachTest {
public:
- void run(){
+ void run() {
BSONObj obj = BSON("a" << 1 << "a" << 2 << "a" << 3);
-
+
int count = 0;
- BSONForEach(e, obj){
+ BSONForEach(e, obj) {
ASSERT_EQUALS( e.fieldName() , string("a") );
count += e.Int();
}
@@ -1625,7 +1688,7 @@ namespace JsobjTests {
class StringDataTest {
public:
- void run(){
+ void run() {
StringData a( string( "aaa" ) );
ASSERT_EQUALS( 3u , a.size() );
@@ -1645,8 +1708,8 @@ namespace JsobjTests {
class CompareOps {
public:
- void run(){
-
+ void run() {
+
BSONObj a = BSON("a"<<1);
BSONObj b = BSON("a"<<1);
BSONObj c = BSON("a"<<2);
@@ -1657,7 +1720,7 @@ namespace JsobjTests {
ASSERT( ! ( a < b ) );
ASSERT( a <= b );
ASSERT( a < c );
-
+
ASSERT( f > d );
ASSERT( f >= e );
ASSERT( ! ( f > e ) );
@@ -1666,12 +1729,12 @@ namespace JsobjTests {
class HashingTest {
public:
- void run(){
+ void run() {
int N = 100000;
- BSONObj x = BSON( "name" << "eliot was here"
+ BSONObj x = BSON( "name" << "eliot was here"
<< "x" << 5
<< "asdasdasdas" << "asldkasldjasldjasldjlasjdlasjdlasdasdasdasdasdasdasd" );
-
+
{
Timer t;
for ( int i=0; i<N; i++ )
@@ -1679,7 +1742,7 @@ namespace JsobjTests {
int millis = t.millis();
cout << "md5 : " << millis << endl;
}
-
+
{
Timer t;
for ( int i=0; i<N; i++ )
@@ -1694,17 +1757,17 @@ namespace JsobjTests {
checksum( x.objdata() , x.objsize() );
int millis = t.millis();
cout << "checksum : " << millis << endl;
- }
-
+ }
+
}
};
class All : public Suite {
public:
- All() : Suite( "jsobj" ){
+ All() : Suite( "jsobj" ) {
}
- void setupTests(){
+ void setupTests() {
add< BufBuilderBasic >();
add< BSONElementBasic >();
add< BSONObjTests::Create >();
@@ -1724,6 +1787,10 @@ namespace JsobjTests {
add< BSONObjTests::ToStringArray >();
add< BSONObjTests::ToStringNumber >();
add< BSONObjTests::NullString >();
+ add< BSONObjTests::AppendAs >();
+ add< BSONObjTests::ArrayAppendAs >();
+ add< BSONObjTests::GetField >();
+
add< BSONObjTests::Validation::BadType >();
add< BSONObjTests::Validation::EooBeforeEnd >();
add< BSONObjTests::Validation::Undefined >();
@@ -1771,16 +1838,13 @@ namespace JsobjTests {
add< ValueStreamTests::LabelishOr >();
add< ValueStreamTests::Unallowed >();
add< ValueStreamTests::ElementAppend >();
- add< SubObjectBuilder >();
- add< DateBuilder >();
- add< DateNowBuilder >();
- add< TimeTBuilder >();
add< ValueStreamTests::Unallowed >();
add< ValueStreamTests::ElementAppend >();
add< SubObjectBuilder >();
add< DateBuilder >();
add< DateNowBuilder >();
add< TimeTBuilder >();
+ add< MinMaxKeyBuilder >();
add< MinMaxElementTest >();
add< ComparatorTest >();
add< ExtractFieldsTest >();
@@ -1810,6 +1874,6 @@ namespace JsobjTests {
add< HashingTest >();
}
} myall;
-
+
} // namespace JsobjTests
diff --git a/dbtests/jsontests.cpp b/dbtests/jsontests.cpp
index 990558e..b630523 100644
--- a/dbtests/jsontests.cpp
+++ b/dbtests/jsontests.cpp
@@ -205,11 +205,11 @@ namespace JsonTests {
b.appendDBRef( "a", "namespace", oid );
BSONObj built = b.done();
ASSERT_EQUALS( "{ \"a\" : { \"$ref\" : \"namespace\", \"$id\" : \"ffffffffffffffffffffffff\" } }",
- built.jsonString( Strict ) );
+ built.jsonString( Strict ) );
ASSERT_EQUALS( "{ \"a\" : { \"$ref\" : \"namespace\", \"$id\" : \"ffffffffffffffffffffffff\" } }",
- built.jsonString( JS ) );
+ built.jsonString( JS ) );
ASSERT_EQUALS( "{ \"a\" : Dbref( \"namespace\", \"ffffffffffffffffffffffff\" ) }",
- built.jsonString( TenGen ) );
+ built.jsonString( TenGen ) );
}
};
@@ -221,7 +221,7 @@ namespace JsonTests {
BSONObjBuilder b;
b.appendDBRef( "a", "namespace", oid );
ASSERT_EQUALS( "{ \"a\" : { \"$ref\" : \"namespace\", \"$id\" : \"000000000000000000000000\" } }",
- b.done().jsonString( Strict ) );
+ b.done().jsonString( Strict ) );
}
};
@@ -234,9 +234,9 @@ namespace JsonTests {
b.appendOID( "a", &oid );
BSONObj built = b.done();
ASSERT_EQUALS( "{ \"a\" : { \"$oid\" : \"ffffffffffffffffffffffff\" } }",
- built.jsonString( Strict ) );
+ built.jsonString( Strict ) );
ASSERT_EQUALS( "{ \"a\" : ObjectId( \"ffffffffffffffffffffffff\" ) }",
- built.jsonString( TenGen ) );
+ built.jsonString( TenGen ) );
}
};
@@ -258,12 +258,12 @@ namespace JsonTests {
BSONObjBuilder c;
c.appendBinData( "a", 2, BinDataGeneral, z );
ASSERT_EQUALS( "{ \"a\" : { \"$binary\" : \"YWI=\", \"$type\" : \"00\" } }",
- c.done().jsonString( Strict ) );
+ c.done().jsonString( Strict ) );
BSONObjBuilder d;
d.appendBinData( "a", 1, BinDataGeneral, z );
ASSERT_EQUALS( "{ \"a\" : { \"$binary\" : \"YQ==\", \"$type\" : \"00\" } }",
- d.done().jsonString( Strict ) );
+ d.done().jsonString( Strict ) );
}
};
@@ -295,7 +295,7 @@ namespace JsonTests {
b.appendRegex( "a", "abc", "i" );
BSONObj built = b.done();
ASSERT_EQUALS( "{ \"a\" : { \"$regex\" : \"abc\", \"$options\" : \"i\" } }",
- built.jsonString( Strict ) );
+ built.jsonString( Strict ) );
ASSERT_EQUALS( "{ \"a\" : /abc/i }", built.jsonString( TenGen ) );
ASSERT_EQUALS( "{ \"a\" : /abc/i }", built.jsonString( JS ) );
}
@@ -308,7 +308,7 @@ namespace JsonTests {
b.appendRegex( "a", "/\"", "i" );
BSONObj built = b.done();
ASSERT_EQUALS( "{ \"a\" : { \"$regex\" : \"/\\\"\", \"$options\" : \"i\" } }",
- built.jsonString( Strict ) );
+ built.jsonString( Strict ) );
ASSERT_EQUALS( "{ \"a\" : /\\/\\\"/i }", built.jsonString( TenGen ) );
ASSERT_EQUALS( "{ \"a\" : /\\/\\\"/i }", built.jsonString( JS ) );
}
@@ -321,7 +321,7 @@ namespace JsonTests {
b.appendRegex( "a", "z", "abcgimx" );
BSONObj built = b.done();
ASSERT_EQUALS( "{ \"a\" : { \"$regex\" : \"z\", \"$options\" : \"abcgimx\" } }",
- built.jsonString( Strict ) );
+ built.jsonString( Strict ) );
ASSERT_EQUALS( "{ \"a\" : /z/gim }", built.jsonString( TenGen ) );
ASSERT_EQUALS( "{ \"a\" : /z/gim }", built.jsonString( JS ) );
}
@@ -329,17 +329,17 @@ namespace JsonTests {
class CodeTests {
public:
- void run(){
+ void run() {
BSONObjBuilder b;
b.appendCode( "x" , "function(){ return 1; }" );
BSONObj o = b.obj();
ASSERT_EQUALS( "{ \"x\" : function(){ return 1; } }" , o.jsonString() );
}
};
-
+
class TimestampTests {
public:
- void run(){
+ void run() {
BSONObjBuilder b;
b.appendTimestamp( "x" , 4000 , 10 );
BSONObj o = b.obj();
@@ -349,7 +349,7 @@ namespace JsonTests {
class NullString {
public:
- void run(){
+ void run() {
BSONObjBuilder b;
b.append( "x" , "a\0b" , 4 );
BSONObj o = b.obj();
@@ -359,7 +359,7 @@ namespace JsonTests {
class AllTypes {
public:
- void run(){
+ void run() {
OID oid;
oid.init();
@@ -384,12 +384,12 @@ namespace JsonTests {
b.appendTimestamp( "s" , 123123123123123LL );
b.append( "t" , 12321312312LL );
b.appendMaxKey( "u" );
-
+
BSONObj o = b.obj();
cout << o.jsonString() << endl;
}
};
-
+
} // namespace JsonStringTests
namespace FromJsonTests {
@@ -504,7 +504,7 @@ namespace JsonTests {
virtual ~FancyNumber() {}
void run() {
ASSERT_EQUALS( int( 1000000 * bson().firstElement().number() ),
- int( 1000000 * fromjson( json() ).firstElement().number() ) );
+ int( 1000000 * fromjson( json() ).firstElement().number() ) );
}
virtual BSONObj bson() const {
BSONObjBuilder b;
@@ -978,8 +978,8 @@ namespace JsonTests {
};
class NumericTypes : public Base {
- public:
- void run(){
+ public:
+ void run() {
Base::run();
BSONObj o = fromjson(json());
@@ -990,12 +990,12 @@ namespace JsonTests {
ASSERT(o["long"].numberLong() == 9223372036854775807ll);
}
-
+
virtual BSONObj bson() const {
return BSON( "int" << 123
- << "long" << 9223372036854775807ll // 2**63 - 1
- << "double" << 3.14
- );
+ << "long" << 9223372036854775807ll // 2**63 - 1
+ << "double" << 3.14
+ );
}
virtual string json() const {
return "{ \"int\": 123, \"long\": 9223372036854775807, \"double\": 3.14 }";
@@ -1003,8 +1003,8 @@ namespace JsonTests {
};
class NegativeNumericTypes : public Base {
- public:
- void run(){
+ public:
+ void run() {
Base::run();
BSONObj o = fromjson(json());
@@ -1015,12 +1015,12 @@ namespace JsonTests {
ASSERT(o["long"].numberLong() == -9223372036854775807ll);
}
-
+
virtual BSONObj bson() const {
return BSON( "int" << -123
- << "long" << -9223372036854775807ll // -1 * (2**63 - 1)
- << "double" << -3.14
- );
+ << "long" << -9223372036854775807ll // -1 * (2**63 - 1)
+ << "double" << -3.14
+ );
}
virtual string json() const {
return "{ \"int\": -123, \"long\": -9223372036854775807, \"double\": -3.14 }";
@@ -1029,8 +1029,8 @@ namespace JsonTests {
class EmbeddedDatesBase : public Base {
public:
-
- virtual void run(){
+
+ virtual void run() {
BSONObj o = fromjson( json() );
ASSERT_EQUALS( 3 , (o["time.valid"].type()) );
BSONObj e = o["time.valid"].embeddedObjectUserCheck();
@@ -1038,7 +1038,7 @@ namespace JsonTests {
ASSERT_EQUALS( 9 , e["$lt"].type() );
Base::run();
}
-
+
BSONObj bson() const {
BSONObjBuilder e;
e.appendDate( "$gt" , 1257829200000LL );
@@ -1082,10 +1082,10 @@ namespace JsonTests {
class All : public Suite {
public:
- All() : Suite( "json" ){
+ All() : Suite( "json" ) {
}
- void setupTests(){
+ void setupTests() {
add< JsonStringTests::Empty >();
add< JsonStringTests::SingleStringMember >();
add< JsonStringTests::EscapedCharacters >();
@@ -1116,7 +1116,7 @@ namespace JsonTests {
add< JsonStringTests::TimestampTests >();
add< JsonStringTests::NullString >();
add< JsonStringTests::AllTypes >();
-
+
add< FromJsonTests::Empty >();
add< FromJsonTests::EmptyWithSpace >();
add< FromJsonTests::SingleString >();
diff --git a/dbtests/jstests.cpp b/dbtests/jstests.cpp
index a9d9db8..c33b200 100644
--- a/dbtests/jstests.cpp
+++ b/dbtests/jstests.cpp
@@ -1,4 +1,4 @@
-// javajstests.cpp
+// javajstests.cpp
//
/**
@@ -22,15 +22,16 @@
#include "../pch.h"
#include "../scripting/engine.h"
+#include "../util/timer.h"
#include "dbtests.h"
namespace mongo {
- bool dbEval(const char *ns, BSONObj& cmd, BSONObjBuilder& result, string& errmsg);
+ bool dbEval(const string& dbName , BSONObj& cmd, BSONObjBuilder& result, string& errmsg);
} // namespace mongo
namespace JSTests {
-
+
class Fundamental {
public:
void run() {
@@ -42,26 +43,26 @@ namespace JSTests {
globalScriptEngine->runTest();
}
};
-
+
class BasicScope {
public:
- void run(){
+ void run() {
auto_ptr<Scope> s;
s.reset( globalScriptEngine->newScope() );
s->setNumber( "x" , 5 );
ASSERT( 5 == s->getNumber( "x" ) );
-
+
s->setNumber( "x" , 1.67 );
ASSERT( 1.67 == s->getNumber( "x" ) );
s->setString( "s" , "eliot was here" );
ASSERT( "eliot was here" == s->getString( "s" ) );
-
+
s->setBoolean( "b" , true );
ASSERT( s->getBoolean( "b" ) );
- if ( 0 ){
+ if ( 0 ) {
s->setBoolean( "b" , false );
ASSERT( ! s->getBoolean( "b" ) );
}
@@ -70,12 +71,12 @@ namespace JSTests {
class ResetScope {
public:
- void run(){
+ void run() {
// Not worrying about this for now SERVER-446.
/*
auto_ptr<Scope> s;
s.reset( globalScriptEngine->newScope() );
-
+
s->setBoolean( "x" , true );
ASSERT( s->getBoolean( "x" ) );
@@ -84,36 +85,36 @@ namespace JSTests {
*/
}
};
-
+
class FalseTests {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
ASSERT( ! s->getBoolean( "x" ) );
-
+
s->setString( "z" , "" );
ASSERT( ! s->getBoolean( "z" ) );
-
-
+
+
delete s ;
}
};
class SimpleFunctions {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
s->invoke( "x=5;" , BSONObj() );
ASSERT( 5 == s->getNumber( "x" ) );
-
+
s->invoke( "return 17;" , BSONObj() );
ASSERT( 17 == s->getNumber( "return" ) );
-
+
s->invoke( "function(){ return 17; }" , BSONObj() );
ASSERT( 17 == s->getNumber( "return" ) );
-
+
s->setNumber( "x" , 1.76 );
s->invoke( "return x == 1.76; " , BSONObj() );
ASSERT( s->getBoolean( "return" ) );
@@ -121,7 +122,7 @@ namespace JSTests {
s->setNumber( "x" , 1.76 );
s->invoke( "return x == 1.79; " , BSONObj() );
ASSERT( ! s->getBoolean( "return" ) );
-
+
s->invoke( "function( z ){ return 5 + z; }" , BSON( "" << 11 ) );
ASSERT_EQUALS( 16 , s->getNumber( "return" ) );
@@ -131,9 +132,9 @@ namespace JSTests {
class ObjectMapping {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
-
+
BSONObj o = BSON( "x" << 17 << "y" << "eliot" << "z" << "sara" );
s->setObject( "blah" , o );
@@ -154,7 +155,7 @@ namespace JSTests {
s->invoke( "this.z == 'asara';" , BSONObj() );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
-
+
s->invoke( "return this.x == 17;" , BSONObj() );
ASSERT_EQUALS( true , s->getBoolean( "return" ) );
@@ -169,28 +170,28 @@ namespace JSTests {
s->invoke( "function (){ return this.x == 17; }" , BSONObj() );
ASSERT_EQUALS( true , s->getBoolean( "return" ) );
-
+
s->invoke( "function z(){ return this.x == 18; }" , BSONObj() );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
s->invoke( "function (){ this.x == 17; }" , BSONObj() );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
-
+
s->invoke( "function z(){ this.x == 18; }" , BSONObj() );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
s->invoke( "x = 5; for( ; x <10; x++){ a = 1; }" , BSONObj() );
ASSERT_EQUALS( 10 , s->getNumber( "x" ) );
-
+
delete s;
}
};
class ObjectDecoding {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
-
+
s->invoke( "z = { num : 1 };" , BSONObj() );
BSONObj out = s->getObject( "z" );
ASSERT_EQUALS( 1 , out["num"].number() );
@@ -200,43 +201,43 @@ namespace JSTests {
out = s->getObject( "z" );
ASSERT_EQUALS( (string)"eliot" , out["x"].valuestr() );
ASSERT_EQUALS( 1 , out.nFields() );
-
+
BSONObj o = BSON( "x" << 17 );
- s->setObject( "blah" , o );
+ s->setObject( "blah" , o );
out = s->getObject( "blah" );
ASSERT_EQUALS( 17 , out["x"].number() );
-
+
delete s;
}
};
-
+
class JSOIDTests {
public:
- void run(){
+ void run() {
#ifdef MOZJS
Scope * s = globalScriptEngine->newScope();
-
+
s->localConnect( "blah" );
-
+
s->invoke( "z = { _id : new ObjectId() , a : 123 };" , BSONObj() );
BSONObj out = s->getObject( "z" );
ASSERT_EQUALS( 123 , out["a"].number() );
ASSERT_EQUALS( jstOID , out["_id"].type() );
-
+
OID save = out["_id"].__oid();
-
+
s->setObject( "a" , out );
-
- s->invoke( "y = { _id : a._id , a : 124 };" , BSONObj() );
+
+ s->invoke( "y = { _id : a._id , a : 124 };" , BSONObj() );
out = s->getObject( "y" );
ASSERT_EQUALS( 124 , out["a"].number() );
- ASSERT_EQUALS( jstOID , out["_id"].type() );
+ ASSERT_EQUALS( jstOID , out["_id"].type() );
ASSERT_EQUALS( out["_id"].__oid().str() , save.str() );
- s->invoke( "y = { _id : new ObjectId( a._id ) , a : 125 };" , BSONObj() );
+ s->invoke( "y = { _id : new ObjectId( a._id ) , a : 125 };" , BSONObj() );
out = s->getObject( "y" );
ASSERT_EQUALS( 125 , out["a"].number() );
- ASSERT_EQUALS( jstOID , out["_id"].type() );
+ ASSERT_EQUALS( jstOID , out["_id"].type() );
ASSERT_EQUALS( out["_id"].__oid().str() , save.str() );
delete s;
@@ -267,9 +268,9 @@ namespace JSTests {
class ObjectModReadonlyTests {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
-
+
BSONObj o = BSON( "x" << 17 << "y" << "eliot" << "z" << "sara" << "zz" << BSONObj() );
s->setObject( "blah" , o , true );
@@ -288,16 +289,16 @@ namespace JSTests {
s->setObject( "blah.zz", BSON( "a" << 19 ) );
out = s->getObject( "blah" );
ASSERT( out["zz"].embeddedObject()["a"].eoo() );
-
+
s->invoke( "delete blah['x']" , BSONObj() );
out = s->getObject( "blah" );
ASSERT( !out["x"].eoo() );
-
+
// read-only object itself can be overwritten
s->invoke( "blah = {}", BSONObj() );
out = s->getObject( "blah" );
ASSERT( out.isEmpty() );
-
+
// test array - can't implement this in v8
// o = fromjson( "{a:[1,2,3]}" );
// s->setObject( "blah", o, true );
@@ -307,45 +308,47 @@ namespace JSTests {
// out = s->getObject( "blah" );
// ASSERT_EQUALS( 1.0, out[ "a" ].embeddedObject()[ 0 ].number() );
// ASSERT_EQUALS( 3.0, out[ "a" ].embeddedObject()[ 2 ].number() );
-
+
delete s;
}
};
class OtherJSTypes {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
-
- { // date
+
+ {
+ // date
BSONObj o;
- {
+ {
BSONObjBuilder b;
b.appendDate( "d" , 123456789 );
o = b.obj();
}
s->setObject( "x" , o );
-
+
s->invoke( "return x.d.getTime() != 12;" , BSONObj() );
ASSERT_EQUALS( true, s->getBoolean( "return" ) );
-
+
s->invoke( "z = x.d.getTime();" , BSONObj() );
ASSERT_EQUALS( 123456789 , s->getNumber( "z" ) );
-
+
s->invoke( "z = { z : x.d }" , BSONObj() );
BSONObj out = s->getObject( "z" );
ASSERT( out["z"].type() == Date );
}
- { // regex
+ {
+ // regex
BSONObj o;
- {
+ {
BSONObjBuilder b;
b.appendRegex( "r" , "^a" , "i" );
o = b.obj();
}
s->setObject( "x" , o );
-
+
s->invoke( "z = x.r.test( 'b' );" , BSONObj() );
ASSERT_EQUALS( false , s->getBoolean( "z" ) );
@@ -362,26 +365,26 @@ namespace JSTests {
ASSERT_EQUALS( (string)"i" , out["a"].regexFlags() );
}
-
+
// array
{
BSONObj o = fromjson( "{r:[1,2,3]}" );
- s->setObject( "x", o, false );
+ s->setObject( "x", o, false );
BSONObj out = s->getObject( "x" );
ASSERT_EQUALS( Array, out.firstElement().type() );
- s->setObject( "x", o, true );
+ s->setObject( "x", o, true );
out = s->getObject( "x" );
ASSERT_EQUALS( Array, out.firstElement().type() );
}
-
+
delete s;
}
};
class SpecialDBTypes {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
BSONObjBuilder b;
@@ -389,7 +392,7 @@ namespace JSTests {
b.appendMinKey( "b" );
b.appendMaxKey( "c" );
b.appendTimestamp( "d" , 1234000 , 9876 );
-
+
{
BSONObj t = b.done();
@@ -398,7 +401,7 @@ namespace JSTests {
}
s->setObject( "z" , b.obj() );
-
+
ASSERT( s->invoke( "y = { a : z.a , b : z.b , c : z.c , d: z.d }" , BSONObj() ) == 0 );
BSONObj out = s->getObject( "y" );
@@ -414,14 +417,14 @@ namespace JSTests {
delete s;
}
};
-
+
class TypeConservation {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
-
+
// -- A --
-
+
BSONObj o;
{
BSONObjBuilder b ;
@@ -431,7 +434,7 @@ namespace JSTests {
}
ASSERT_EQUALS( NumberInt , o["a"].type() );
ASSERT_EQUALS( NumberDouble , o["b"].type() );
-
+
s->setObject( "z" , o );
s->invoke( "return z" , BSONObj() );
BSONObj out = s->getObject( "return" );
@@ -442,7 +445,7 @@ namespace JSTests {
ASSERT_EQUALS( NumberInt , out["a"].type() );
// -- B --
-
+
{
BSONObjBuilder b ;
b.append( "a" , (int)5 );
@@ -459,31 +462,31 @@ namespace JSTests {
ASSERT_EQUALS( NumberDouble , out["b"].type() );
ASSERT_EQUALS( NumberInt , out["a"].type() );
-
+
// -- C --
-
+
{
BSONObjBuilder b ;
-
+
{
BSONObjBuilder c;
c.append( "0" , 5.5 );
c.append( "1" , 6 );
b.appendArray( "a" , c.obj() );
}
-
+
o = b.obj();
}
-
+
ASSERT_EQUALS( NumberDouble , o["a"].embeddedObjectUserCheck()["0"].type() );
ASSERT_EQUALS( NumberInt , o["a"].embeddedObjectUserCheck()["1"].type() );
-
+
s->setObject( "z" , o , false );
out = s->getObject( "z" );
ASSERT_EQUALS( NumberDouble , out["a"].embeddedObjectUserCheck()["0"].type() );
ASSERT_EQUALS( NumberInt , out["a"].embeddedObjectUserCheck()["1"].type() );
-
+
s->invokeSafe( "z.z = 5;" , BSONObj() );
out = s->getObject( "z" );
ASSERT_EQUALS( 5 , out["z"].number() );
@@ -493,9 +496,9 @@ namespace JSTests {
// Eliot says I don't have to worry about this case
-
+
// // -- D --
-//
+//
// o = fromjson( "{a:3.0,b:4.5}" );
// ASSERT_EQUALS( NumberDouble , o["a"].type() );
// ASSERT_EQUALS( NumberDouble , o["b"].type() );
@@ -505,20 +508,20 @@ namespace JSTests {
// out = s->getObject( "return" );
// ASSERT_EQUALS( 3 , out["a"].number() );
// ASSERT_EQUALS( 4.5 , out["b"].number() );
-//
+//
// ASSERT_EQUALS( NumberDouble , out["b"].type() );
// ASSERT_EQUALS( NumberDouble , out["a"].type() );
-//
-
+//
+
delete s;
}
-
+
};
-
+
class NumberLong {
public:
void run() {
- Scope * s = globalScriptEngine->newScope();
+ auto_ptr<Scope> s( globalScriptEngine->newScope() );
s->localConnect( "blah" );
BSONObjBuilder b;
long long val = (long long)( 0xbabadeadbeefbaddULL );
@@ -527,7 +530,7 @@ namespace JSTests {
s->setObject( "a", in );
BSONObj out = s->getObject( "a" );
ASSERT_EQUALS( mongo::NumberLong, out.firstElement().type() );
-
+
ASSERT( s->exec( "printjson( a ); b = {b:a.a}", "foo", false, true, false ) );
out = s->getObject( "b" );
ASSERT_EQUALS( mongo::NumberLong, out.firstElement().type() );
@@ -537,7 +540,7 @@ namespace JSTests {
cout << out.toString() << endl;
ASSERT_EQUALS( val, out.firstElement().numberLong() );
}
-
+
ASSERT( s->exec( "c = {c:a.a.toString()}", "foo", false, true, false ) );
out = s->getObject( "c" );
stringstream ss;
@@ -552,12 +555,12 @@ namespace JSTests {
ASSERT( s->exec( "e = {e:a.a.floatApprox}", "foo", false, true, false ) );
out = s->getObject( "e" );
ASSERT_EQUALS( NumberDouble, out.firstElement().type() );
- ASSERT_EQUALS( double( val ), out.firstElement().number() );
+ ASSERT_EQUALS( double( val ), out.firstElement().number() );
ASSERT( s->exec( "f = {f:a.a.top}", "foo", false, true, false ) );
out = s->getObject( "f" );
ASSERT( NumberDouble == out.firstElement().type() || NumberInt == out.firstElement().type() );
-
+
s->setObject( "z", BSON( "z" << (long long)( 4 ) ) );
ASSERT( s->exec( "y = {y:z.z.top}", "foo", false, true, false ) );
out = s->getObject( "y" );
@@ -566,36 +569,64 @@ namespace JSTests {
ASSERT( s->exec( "x = {x:z.z.floatApprox}", "foo", false, true, false ) );
out = s->getObject( "x" );
ASSERT( NumberDouble == out.firstElement().type() || NumberInt == out.firstElement().type() );
- ASSERT_EQUALS( double( 4 ), out.firstElement().number() );
+ ASSERT_EQUALS( double( 4 ), out.firstElement().number() );
ASSERT( s->exec( "w = {w:z.z}", "foo", false, true, false ) );
out = s->getObject( "w" );
ASSERT_EQUALS( mongo::NumberLong, out.firstElement().type() );
- ASSERT_EQUALS( 4, out.firstElement().numberLong() );
-
+ ASSERT_EQUALS( 4, out.firstElement().numberLong() );
+
}
};
-
+
+ class NumberLong2 {
+ public:
+ void run() {
+ auto_ptr<Scope> s( globalScriptEngine->newScope() );
+ s->localConnect( "blah" );
+
+ BSONObj in;
+ {
+ BSONObjBuilder b;
+ b.append( "a" , 5 );
+ b.append( "b" , (long long)5 );
+ b.append( "c" , (long long)pow( 2.0, 29 ) );
+ b.append( "d" , (long long)pow( 2.0, 30 ) );
+ b.append( "e" , (long long)pow( 2.0, 31 ) );
+ b.append( "f" , (long long)pow( 2.0, 45 ) );
+ in = b.obj();
+ }
+ s->setObject( "a" , in );
+
+ ASSERT( s->exec( "x = tojson( a ); " ,"foo" , false , true , false ) );
+ string outString = s->getString( "x" );
+
+ ASSERT( s->exec( (string)"y = " + outString , "foo2" , false , true , false ) );
+ BSONObj out = s->getObject( "y" );
+ ASSERT_EQUALS( in , out );
+ }
+ };
+
class WeirdObjects {
public:
- BSONObj build( int depth ){
+ BSONObj build( int depth ) {
BSONObjBuilder b;
b.append( "0" , depth );
if ( depth > 0 )
b.appendArray( "1" , build( depth - 1 ) );
return b.obj();
}
-
- void run(){
+
+ void run() {
Scope * s = globalScriptEngine->newScope();
s->localConnect( "blah" );
-
- for ( int i=5; i<100 ; i += 10 ){
+
+ for ( int i=5; i<100 ; i += 10 ) {
s->setObject( "a" , build(i) , false );
s->invokeSafe( "tojson( a )" , BSONObj() );
-
+
s->setObject( "a" , build(5) , true );
s->invokeSafe( "tojson( a )" , BSONObj() );
}
@@ -609,11 +640,12 @@ namespace JSTests {
BSONObj cmd;
BSONObjBuilder result;
string errmsg;
- dbEval( "", cmd, result, errmsg);
+ dbEval( "test", cmd, result, errmsg);
+ assert(0);
}
DBDirectClient client;
-
+
class Utf8Check {
public:
Utf8Check() { reset(); }
@@ -638,7 +670,7 @@ namespace JSTests {
}
void reset() {
client.dropCollection( ns() );
- }
+ }
static const char *ns() { return "unittest.jstests.utf8check"; }
};
@@ -654,13 +686,13 @@ namespace JSTests {
private:
void reset() {
client.dropCollection( ns() );
- }
+ }
static const char *ns() { return "unittest.jstests.longutf8string"; }
};
class InvalidUTF8Check {
public:
- void run(){
+ void run() {
if( !globalScriptEngine->utf8Ok() )
return;
@@ -676,24 +708,24 @@ namespace JSTests {
crap[2] = (char) 128;
crap[3] = 17;
crap[4] = 0;
-
+
BSONObjBuilder bb;
bb.append( "x" , crap );
b = bb.obj();
}
-
+
//cout << "ELIOT: " << b.jsonString() << endl;
s->setThis( &b );
// its ok if this is handled by js, just can't create a c++ exception
- s->invoke( "x=this.x.length;" , BSONObj() );
+ s->invoke( "x=this.x.length;" , BSONObj() );
}
};
-
+
class CodeTests {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
-
+
{
BSONObjBuilder b;
b.append( "a" , 1 );
@@ -702,10 +734,10 @@ namespace JSTests {
b.appendCodeWScope( "d" , "function(){ out.d = 13 + bleh; }" , BSON( "bleh" << 5 ) );
s->setObject( "foo" , b.obj() );
}
-
+
s->invokeSafe( "out = {}; out.a = foo.a; foo.b(); foo.c();" , BSONObj() );
BSONObj out = s->getObject( "out" );
-
+
ASSERT_EQUALS( 1 , out["a"].number() );
ASSERT_EQUALS( 11 , out["b"].number() );
ASSERT_EQUALS( 12 , out["c"].number() );
@@ -714,7 +746,7 @@ namespace JSTests {
//s->invokeSafe( "foo.d() " , BSONObj() );
//out = s->getObject( "out" );
//ASSERT_EQUALS( 18 , out["d"].number() );
-
+
delete s;
}
@@ -722,19 +754,19 @@ namespace JSTests {
class DBRefTest {
public:
- DBRefTest(){
+ DBRefTest() {
_a = "unittest.dbref.a";
_b = "unittest.dbref.b";
reset();
}
- ~DBRefTest(){
+ ~DBRefTest() {
//reset();
}
-
- void run(){
+
+ void run() {
client.insert( _a , BSON( "a" << "17" ) );
-
+
{
BSONObj fromA = client.findOne( _a , BSONObj() );
assert( fromA.valid() );
@@ -744,28 +776,28 @@ namespace JSTests {
b.appendDBRef( "c" , "dbref.a" , fromA["_id"].__oid() );
client.insert( _b , b.obj() );
}
-
+
ASSERT( client.eval( "unittest" , "x = db.dbref.b.findOne(); assert.eq( 17 , x.c.fetch().a , 'ref working' );" ) );
-
+
// BSON DBRef <=> JS DBPointer
ASSERT( client.eval( "unittest", "x = db.dbref.b.findOne(); db.dbref.b.drop(); x.c = new DBPointer( x.c.ns, x.c.id ); db.dbref.b.insert( x );" ) );
ASSERT_EQUALS( DBRef, client.findOne( "unittest.dbref.b", "" )[ "c" ].type() );
-
+
// BSON Object <=> JS DBRef
ASSERT( client.eval( "unittest", "x = db.dbref.b.findOne(); db.dbref.b.drop(); x.c = new DBRef( x.c.ns, x.c.id ); db.dbref.b.insert( x );" ) );
ASSERT_EQUALS( Object, client.findOne( "unittest.dbref.b", "" )[ "c" ].type() );
ASSERT_EQUALS( string( "dbref.a" ), client.findOne( "unittest.dbref.b", "" )[ "c" ].embeddedObject().getStringField( "$ref" ) );
}
-
- void reset(){
+
+ void reset() {
client.dropCollection( _a );
client.dropCollection( _b );
}
-
+
const char * _a;
const char * _b;
};
-
+
class InformalDBRef {
public:
void run() {
@@ -775,20 +807,20 @@ namespace JSTests {
client.insert( ns(), BSON( "r" << BSON( "$ref" << "jstests.informaldbref" << "$id" << obj["_id"].__oid() << "foo" << "bar" ) ) );
obj = client.findOne( ns(), BSONObj() );
ASSERT_EQUALS( "bar", obj[ "r" ].embeddedObject()[ "foo" ].str() );
-
+
ASSERT( client.eval( "unittest", "x = db.jstests.informaldbref.findOne(); y = { r:x.r }; db.jstests.informaldbref.drop(); y.r[ \"a\" ] = \"b\"; db.jstests.informaldbref.save( y );" ) );
obj = client.findOne( ns(), BSONObj() );
- ASSERT_EQUALS( "bar", obj[ "r" ].embeddedObject()[ "foo" ].str() );
- ASSERT_EQUALS( "b", obj[ "r" ].embeddedObject()[ "a" ].str() );
+ ASSERT_EQUALS( "bar", obj[ "r" ].embeddedObject()[ "foo" ].str() );
+ ASSERT_EQUALS( "b", obj[ "r" ].embeddedObject()[ "a" ].str() );
}
private:
static const char *ns() { return "unittest.jstests.informaldbref"; }
};
-
+
class BinDataType {
public:
-
- void pp( const char * s , BSONElement e ){
+
+ void pp( const char * s , BSONElement e ) {
int len;
const char * data = e.binData( len );
cout << s << ":" << e.binDataType() << "\t" << len << endl;
@@ -798,12 +830,12 @@ namespace JSTests {
cout << endl;
}
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
s->localConnect( "asd" );
const char * foo = "asdas\0asdasd";
const char * base64 = "YXNkYXMAYXNkYXNk";
-
+
BSONObj in;
{
BSONObjBuilder b;
@@ -812,10 +844,10 @@ namespace JSTests {
in = b.obj();
s->setObject( "x" , in );
}
-
+
s->invokeSafe( "myb = x.b; print( myb ); printjson( myb );" , BSONObj() );
s->invokeSafe( "y = { c : myb };" , BSONObj() );
-
+
BSONObj out = s->getObject( "y" );
ASSERT_EQUALS( BinData , out["c"].type() );
// pp( "in " , in["b"] );
@@ -827,14 +859,14 @@ namespace JSTests {
stringstream expected;
expected << "BinData(" << BinDataGeneral << ",\"" << base64 << "\")";
ASSERT_EQUALS( expected.str(), s->getString( "q" ) );
-
+
stringstream scriptBuilder;
scriptBuilder << "z = { c : new BinData( " << BinDataGeneral << ", \"" << base64 << "\" ) };";
string script = scriptBuilder.str();
s->invokeSafe( script.c_str(), BSONObj() );
out = s->getObject( "z" );
// pp( "out" , out["c"] );
- ASSERT_EQUALS( 0 , in["b"].woCompare( out["c"] , false ) );
+ ASSERT_EQUALS( 0 , in["b"].woCompare( out["c"] , false ) );
s->invokeSafe( "a = { f: new BinData( 128, \"\" ) };", BSONObj() );
out = s->getObject( "a" );
@@ -842,16 +874,16 @@ namespace JSTests {
out[ "f" ].binData( len );
ASSERT_EQUALS( 0, len );
ASSERT_EQUALS( 128, out[ "f" ].binDataType() );
-
+
delete s;
}
};
class VarTests {
public:
- void run(){
+ void run() {
Scope * s = globalScriptEngine->newScope();
-
+
ASSERT( s->exec( "a = 5;" , "a" , false , true , false ) );
ASSERT_EQUALS( 5 , s->getNumber("a" ) );
@@ -863,19 +895,19 @@ namespace JSTests {
class Speed1 {
public:
- void run(){
+ void run() {
BSONObj start = BSON( "x" << 5 );
BSONObj empty;
auto_ptr<Scope> s;
s.reset( globalScriptEngine->newScope() );
-
+
ScriptingFunction f = s->createFunction( "return this.x + 6;" );
s->setThis( &start );
-
+
Timer t;
double n = 0;
- for ( ; n < 100000; n++ ){
+ for ( ; n < 100000; n++ ) {
s->invoke( f , empty );
ASSERT_EQUALS( 11 , s->getNumber( "return" ) );
}
@@ -885,10 +917,10 @@ namespace JSTests {
class ScopeOut {
public:
- void run(){
+ void run() {
auto_ptr<Scope> s;
s.reset( globalScriptEngine->newScope() );
-
+
s->invokeSafe( "x = 5;" , BSONObj() );
{
BSONObjBuilder b;
@@ -910,18 +942,39 @@ namespace JSTests {
}
};
+ class RenameTest {
+ public:
+ void run() {
+ auto_ptr<Scope> s;
+ s.reset( globalScriptEngine->newScope() );
+
+ s->setNumber( "x" , 5 );
+ ASSERT_EQUALS( 5 , s->getNumber( "x" ) );
+ ASSERT_EQUALS( Undefined , s->type( "y" ) );
+
+ s->rename( "x" , "y" );
+ ASSERT_EQUALS( 5 , s->getNumber( "y" ) );
+ ASSERT_EQUALS( Undefined , s->type( "x" ) );
+
+ s->rename( "y" , "x" );
+ ASSERT_EQUALS( 5 , s->getNumber( "x" ) );
+ ASSERT_EQUALS( Undefined , s->type( "y" ) );
+ }
+ };
+
+
class All : public Suite {
public:
All() : Suite( "js" ) {
}
-
- void setupTests(){
+
+ void setupTests() {
add< Fundamental >();
add< BasicScope >();
add< ResetScope >();
add< FalseTests >();
add< SimpleFunctions >();
-
+
add< ObjectMapping >();
add< ObjectDecoding >();
add< JSOIDTests >();
@@ -931,15 +984,17 @@ namespace JSTests {
add< SpecialDBTypes >();
add< TypeConservation >();
add< NumberLong >();
-
+ add< NumberLong2 >();
+ add< RenameTest >();
+
add< WeirdObjects >();
add< CodeTests >();
add< DBRefTest >();
add< InformalDBRef >();
add< BinDataType >();
-
+
add< VarTests >();
-
+
add< Speed1 >();
add< InvalidUTF8Check >();
@@ -949,6 +1004,6 @@ namespace JSTests {
add< ScopeOut >();
}
} myall;
-
+
} // namespace JavaJSTests
diff --git a/dbtests/matchertests.cpp b/dbtests/matchertests.cpp
index 696c924..380b8b8 100644
--- a/dbtests/matchertests.cpp
+++ b/dbtests/matchertests.cpp
@@ -18,12 +18,15 @@
*/
#include "pch.h"
-#include "../db/matcher.h"
+#include "../util/timer.h"
+#include "../db/matcher.h"
#include "../db/json.h"
#include "dbtests.h"
+
+
namespace MatcherTests {
class Basic {
@@ -34,26 +37,26 @@ namespace MatcherTests {
ASSERT( m.matches( fromjson( "{\"a\":\"b\"}" ) ) );
}
};
-
+
class DoubleEqual {
public:
void run() {
BSONObj query = fromjson( "{\"a\":5}" );
Matcher m( query );
- ASSERT( m.matches( fromjson( "{\"a\":5}" ) ) );
+ ASSERT( m.matches( fromjson( "{\"a\":5}" ) ) );
}
};
-
+
class MixedNumericEqual {
public:
void run() {
BSONObjBuilder query;
query.append( "a", 5 );
Matcher m( query.done() );
- ASSERT( m.matches( fromjson( "{\"a\":5}" ) ) );
- }
+ ASSERT( m.matches( fromjson( "{\"a\":5}" ) ) );
+ }
};
-
+
class MixedNumericGt {
public:
void run() {
@@ -62,16 +65,16 @@ namespace MatcherTests {
BSONObjBuilder b;
b.append( "a", 5 );
ASSERT( m.matches( b.done() ) );
- }
+ }
};
-
+
class MixedNumericIN {
public:
- void run(){
+ void run() {
BSONObj query = fromjson( "{ a : { $in : [4,6] } }" );
ASSERT_EQUALS( 4 , query["a"].embeddedObject()["$in"].embeddedObject()["0"].number() );
ASSERT_EQUALS( NumberInt , query["a"].embeddedObject()["$in"].embeddedObject()["0"].type() );
-
+
Matcher m( query );
{
@@ -92,19 +95,19 @@ namespace MatcherTests {
b.append( "a" , 4 );
ASSERT( m.matches( b.done() ) );
}
-
+
}
};
class MixedNumericEmbedded {
public:
- void run(){
+ void run() {
Matcher m( BSON( "a" << BSON( "x" << 1 ) ) );
ASSERT( m.matches( BSON( "a" << BSON( "x" << 1 ) ) ) );
ASSERT( m.matches( BSON( "a" << BSON( "x" << 1.0 ) ) ) );
}
};
-
+
class Size {
public:
void run() {
@@ -113,16 +116,38 @@ namespace MatcherTests {
ASSERT( !m.matches( fromjson( "{a:[1,2,3]}" ) ) );
ASSERT( !m.matches( fromjson( "{a:[1,2,3,'a','b']}" ) ) );
ASSERT( !m.matches( fromjson( "{a:[[1,2,3,4]]}" ) ) );
- }
+ }
+ };
+
+
+ class TimingBase {
+ public:
+ long time( const BSONObj& patt , const BSONObj& obj ) {
+ Matcher m( patt );
+ Timer t;
+ for ( int i=0; i<10000; i++ ) {
+ ASSERT( m.matches( obj ) );
+ }
+ return t.millis();
+ }
+ };
+
+ class AllTiming : public TimingBase {
+ public:
+ void run() {
+ long normal = time( BSON( "x" << 5 ) , BSON( "x" << 5 ) );
+ long all = time( BSON( "x" << BSON( "$all" << BSON_ARRAY( 5 ) ) ) , BSON( "x" << 5 ) );
+
+ cout << "normal: " << normal << " all: " << all << endl;
+ }
};
-
class All : public Suite {
public:
- All() : Suite( "matcher" ){
+ All() : Suite( "matcher" ) {
}
-
- void setupTests(){
+
+ void setupTests() {
add< Basic >();
add< DoubleEqual >();
add< MixedNumericEqual >();
@@ -130,8 +155,9 @@ namespace MatcherTests {
add< MixedNumericIN >();
add< Size >();
add< MixedNumericEmbedded >();
+ add< AllTiming >();
}
} dball;
-
+
} // namespace MatcherTests
diff --git a/dbtests/mmaptests.cpp b/dbtests/mmaptests.cpp
new file mode 100644
index 0000000..7fb6eee
--- /dev/null
+++ b/dbtests/mmaptests.cpp
@@ -0,0 +1,219 @@
+// @file mmaptests.cpp
+
+/**
+ * Copyright (C) 2008 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "../db/mongommf.h"
+#include "../util/timer.h"
+#include "dbtests.h"
+
+namespace MMapTests {
+
+ class LeakTest {
+ const string fn;
+ const int optOld;
+ public:
+ LeakTest() :
+ fn( (path(dbpath) / "testfile.map").string() ), optOld(cmdLine.durOptions)
+ {
+ cmdLine.durOptions = 0; // DurParanoid doesn't make sense with this test
+ }
+ ~LeakTest() {
+ cmdLine.durOptions = optOld;
+ try { boost::filesystem::remove(fn); }
+ catch(...) { }
+ }
+ void run() {
+
+ try { boost::filesystem::remove(fn); }
+ catch(...) { }
+
+ writelock lk;
+
+ {
+ MongoMMF f;
+ unsigned long long len = 256 * 1024 * 1024;
+ assert( f.create(fn, len, /*sequential*/false) );
+ {
+ char *p = (char *) f.getView();
+ assert(p);
+ // write something to the private view as a test
+ if( cmdLine.dur )
+ MemoryMappedFile::makeWritable(p, 6);
+ strcpy(p, "hello");
+ }
+ if( cmdLine.dur ) {
+ char *w = (char *) f.view_write();
+ strcpy(w + 6, "world");
+ }
+ MongoFileFinder ff;
+ ASSERT( ff.findByPath(fn) );
+ ASSERT( ff.findByPath("asdf") == 0 );
+ }
+ {
+ MongoFileFinder ff;
+ ASSERT( ff.findByPath(fn) == 0 );
+ }
+
+ int N = 10000;
+#if !defined(_WIN32) && !defined(__linux__)
+ // seems this test is slow on OS X.
+ N = 100;
+#endif
+
+ // we make a lot here -- if we were leaking, presumably it would fail doing this many.
+ Timer t;
+ for( int i = 0; i < N; i++ ) {
+ MongoMMF f;
+ assert( f.open(fn, i%4==1) );
+ {
+ char *p = (char *) f.getView();
+ assert(p);
+ if( cmdLine.dur )
+ MemoryMappedFile::makeWritable(p, 4);
+ strcpy(p, "zzz");
+ }
+ if( cmdLine.dur ) {
+ char *w = (char *) f.view_write();
+ if( i % 2 == 0 )
+ ++(*w);
+ assert( w[6] == 'w' );
+ }
+ }
+ if( t.millis() > 10000 ) {
+ log() << "warning: MMap LeakTest is unusually slow N:" << N << ' ' << t.millis() << "ms" << endl;
+ }
+
+ }
+ };
+
+ class All : public Suite {
+ public:
+ All() : Suite( "mmap" ) {}
+ void setupTests() {
+ add< LeakTest >();
+ }
+ } myall;
+
+#if 0
+
+ class CopyOnWriteSpeedTest {
+ public:
+ void run() {
+
+ string fn = "/tmp/testfile.map";
+ boost::filesystem::remove(fn);
+
+ MemoryMappedFile f;
+ char *p = (char *) f.create(fn, 1024 * 1024 * 1024, true);
+ assert(p);
+ strcpy(p, "hello");
+
+ {
+ void *x = f.testGetCopyOnWriteView();
+ Timer tt;
+ for( int i = 11; i < 1000000000; i++ )
+ p[i] = 'z';
+ cout << "fill 1GB time: " << tt.millis() << "ms" << endl;
+ f.testCloseCopyOnWriteView(x);
+ }
+
+ /* test a lot of view/unviews */
+ {
+ Timer t;
+
+ char *q;
+ for( int i = 0; i < 1000; i++ ) {
+ q = (char *) f.testGetCopyOnWriteView();
+ assert( q );
+ if( i == 999 ) {
+ strcpy(q+2, "there");
+ }
+ f.testCloseCopyOnWriteView(q);
+ }
+
+ cout << "view unview: " << t.millis() << "ms" << endl;
+ }
+
+ f.flush(true);
+
+ /* plain old mmaped writes */
+ {
+ Timer t;
+ for( int i = 0; i < 10; i++ ) {
+ memset(p+100, 'c', 200 * 1024 * 1024);
+ }
+ cout << "traditional writes: " << t.millis() << "ms" << endl;
+ }
+
+ f.flush(true);
+
+ /* test doing some writes */
+ {
+ Timer t;
+ char *q = (char *) f.testGetCopyOnWriteView();
+ for( int i = 0; i < 10; i++ ) {
+ assert( q );
+ memset(q+100, 'c', 200 * 1024 * 1024);
+ }
+ f.testCloseCopyOnWriteView(q);
+
+ cout << "inc style some writes: " << t.millis() << "ms" << endl;
+ }
+
+ /* test doing some writes */
+ {
+ Timer t;
+ for( int i = 0; i < 10; i++ ) {
+ char *q = (char *) f.testGetCopyOnWriteView();
+ assert( q );
+ memset(q+100, 'c', 200 * 1024 * 1024);
+ f.testCloseCopyOnWriteView(q);
+ }
+
+ cout << "some writes: " << t.millis() << "ms" << endl;
+ }
+
+ /* more granular */
+ {
+ Timer t;
+ for( int i = 0; i < 100; i++ ) {
+ char *q = (char *) f.testGetCopyOnWriteView();
+ assert( q );
+ memset(q+100, 'c', 20 * 1024 * 1024);
+ f.testCloseCopyOnWriteView(q);
+ }
+
+ cout << "more granular some writes: " << t.millis() << "ms" << endl;
+ }
+
+ p[10] = 0;
+ cout << p << endl;
+ }
+ };
+
+ class All : public Suite {
+ public:
+ All() : Suite( "mmap" ) {}
+ void setupTests() {
+ add< CopyOnWriteSpeedTest >();
+ }
+ } myall;
+
+#endif
+
+}
diff --git a/dbtests/mockdbclient.h b/dbtests/mockdbclient.h
index 9119075..fda0963 100644
--- a/dbtests/mockdbclient.h
+++ b/dbtests/mockdbclient.h
@@ -64,8 +64,8 @@ public:
virtual void afterCommand() {}
};
DirectDBClientConnection( ReplPair *rp, ConnectionCallback *cc = 0 ) :
- rp_( rp ),
- cc_( cc ) {
+ rp_( rp ),
+ cc_( cc ) {
}
virtual BSONObj findOne(const string &ns, const Query& query, const BSONObj *fieldsToReturn = 0, int queryOptions = 0) {
BSONObj c = query.obj.copy();
diff --git a/dbtests/namespacetests.cpp b/dbtests/namespacetests.cpp
index ca051fe..c2be0b0 100644
--- a/dbtests/namespacetests.cpp
+++ b/dbtests/namespacetests.cpp
@@ -32,7 +32,7 @@ namespace NamespaceTests {
dblock lk;
Client::Context _context;
public:
- Base() : _context(ns()){
+ Base() : _context(ns()) {
}
virtual ~Base() {
if ( id_.info.isNull() )
@@ -323,7 +323,7 @@ namespace NamespaceTests {
return k.obj();
}
};
-
+
class ArraySubobjectSingleMissing : public Base {
public:
void run() {
@@ -336,7 +336,7 @@ namespace NamespaceTests {
elts.push_back( simpleBC( i ) );
BSONObjBuilder b;
b.append( "a", elts );
-
+
BSONObjSetDefaultOrder keys;
id().getKeysFromObject( b.done(), keys );
checkSize( 4, keys );
@@ -353,7 +353,7 @@ namespace NamespaceTests {
return aDotB();
}
};
-
+
class ArraySubobjectMissing : public Base {
public:
void run() {
@@ -376,7 +376,7 @@ namespace NamespaceTests {
return aDotB();
}
};
-
+
class MissingField : public Base {
public:
void run() {
@@ -391,7 +391,7 @@ namespace NamespaceTests {
return BSON( "a" << 1 );
}
};
-
+
class SubobjectMissing : public Base {
public:
void run() {
@@ -406,12 +406,12 @@ namespace NamespaceTests {
return aDotB();
}
};
-
+
class CompoundMissing : public Base {
public:
- void run(){
+ void run() {
create();
-
+
{
BSONObjSetDefaultOrder keys;
id().getKeysFromObject( fromjson( "{x:'a',y:'b'}" ) , keys );
@@ -428,16 +428,16 @@ namespace NamespaceTests {
b.appendNull( "" );
assertEquals( b.obj() , *keys.begin() );
}
-
+
}
private:
virtual BSONObj key() const {
return BSON( "x" << 1 << "y" << 1 );
}
-
+
};
-
+
class ArraySubelementComplex : public Base {
public:
void run() {
@@ -508,17 +508,17 @@ namespace NamespaceTests {
return aDotB();
}
};
-
+
class EmptyArray : Base {
public:
- void run(){
+ void run() {
create();
BSONObjSetDefaultOrder keys;
id().getKeysFromObject( fromjson( "{a:[1,2]}" ), keys );
checkSize(2, keys );
keys.clear();
-
+
id().getKeysFromObject( fromjson( "{a:[1]}" ), keys );
checkSize(1, keys );
keys.clear();
@@ -535,14 +535,14 @@ namespace NamespaceTests {
class MultiEmptyArray : Base {
public:
- void run(){
+ void run() {
create();
BSONObjSetDefaultOrder keys;
id().getKeysFromObject( fromjson( "{a:1,b:[1,2]}" ), keys );
checkSize(2, keys );
keys.clear();
-
+
id().getKeysFromObject( fromjson( "{a:1,b:[1]}" ), keys );
checkSize(1, keys );
keys.clear();
@@ -551,7 +551,7 @@ namespace NamespaceTests {
//cout << "YO : " << *(keys.begin()) << endl;
checkSize(1, keys );
keys.clear();
-
+
id().getKeysFromObject( fromjson( "{a:1,b:[]}" ), keys );
checkSize(1, keys );
//cout << "YO : " << *(keys.begin()) << endl;
@@ -600,11 +600,11 @@ namespace NamespaceTests {
if ( fileNo == -1 )
continue;
for ( int j = i.ext()->firstRecord.getOfs(); j != DiskLoc::NullOfs;
- j = DiskLoc( fileNo, j ).rec()->nextOfs ) {
+ j = DiskLoc( fileNo, j ).rec()->nextOfs ) {
++count;
}
}
- ASSERT_EQUALS( count, nsd()->nrecords );
+ ASSERT_EQUALS( count, nsd()->stats.nrecords );
return count;
}
int nExtents() const {
@@ -620,7 +620,7 @@ namespace NamespaceTests {
return ns_;
}
NamespaceDetails *nsd() const {
- return nsdetails( ns() );
+ return nsdetails( ns() )->writingWithExtra();
}
static BSONObj bigObj() {
string as( 187, 'a' );
@@ -700,7 +700,7 @@ namespace NamespaceTests {
}
};
- /* test NamespaceDetails::cappedTruncateAfter(const char *ns, DiskLoc loc)
+ /* test NamespaceDetails::cappedTruncateAfter(const char *ns, DiskLoc loc)
*/
class TruncateCapped : public Base {
virtual string spec() const {
@@ -737,9 +737,9 @@ namespace NamespaceTests {
}
DiskLoc d = l[6];
- long long n = nsd->nrecords;
+ long long n = nsd->stats.nrecords;
nsd->cappedTruncateAfter(ns(), d, false);
- ASSERT_EQUALS( nsd->nrecords , n-1 );
+ ASSERT_EQUALS( nsd->stats.nrecords , n-1 );
{
ForwardCappedCursor c(nsd);
@@ -770,7 +770,7 @@ namespace NamespaceTests {
void run() {
create();
nsd()->deletedList[ 2 ] = nsd()->cappedListOfAllDeletedRecords().drec()->nextDeleted.drec()->nextDeleted;
- nsd()->cappedListOfAllDeletedRecords().drec()->nextDeleted.drec()->nextDeleted = DiskLoc();
+ nsd()->cappedListOfAllDeletedRecords().drec()->nextDeleted.drec()->nextDeleted.writing() = DiskLoc();
nsd()->cappedLastDelRecLastExtent().Null();
NamespaceDetails *d = nsd();
zero( &d->capExtent );
@@ -820,15 +820,15 @@ namespace NamespaceTests {
ASSERT_EQUALS( 496U, sizeof( NamespaceDetails ) );
}
};
-
+
} // namespace NamespaceDetailsTests
class All : public Suite {
public:
- All() : Suite( "namespace" ){
+ All() : Suite( "namespace" ) {
}
- void setupTests(){
+ void setupTests() {
add< IndexDetailsTests::Create >();
add< IndexDetailsTests::GetKeysFromObjectSimple >();
add< IndexDetailsTests::GetKeysFromObjectDotted >();
diff --git a/dbtests/pairingtests.cpp b/dbtests/pairingtests.cpp
index 68d4c0e..9cca548 100644
--- a/dbtests/pairingtests.cpp
+++ b/dbtests/pairingtests.cpp
@@ -37,7 +37,7 @@ namespace PairingTests {
~Base() {
pairSync = backup;
dblock lk;
- Helpers::emptyCollection( "local.pair.sync" );
+ Helpers::emptyCollection( "local.pair.sync" );
if ( pairSync->initialSyncCompleted() ) {
// save to db
pairSync->setInitialSyncCompleted();
@@ -63,7 +63,7 @@ namespace PairingTests {
private:
static void init() {
dblock lk;
- Helpers::emptyCollection( "local.pair.sync" );
+ Helpers::emptyCollection( "local.pair.sync" );
if ( synced != 0 && notSynced != 0 )
return;
notSynced = new PairSync();
@@ -71,7 +71,7 @@ namespace PairingTests {
synced = new PairSync();
synced->init();
synced->setInitialSyncCompleted();
- Helpers::emptyCollection( "local.pair.sync" );
+ Helpers::emptyCollection( "local.pair.sync" );
}
PairSync *backup;
static PairSync *synced;
@@ -199,24 +199,24 @@ namespace PairingTests {
TestableReplPair rp4( true, fromjson( "{ok:1,you_are:1}" ) );
rp4.arbitrate();
- ASSERT( rp4.state == ReplPair::State_Master );
+ ASSERT( rp4.state == ReplPair::State_Master );
TestableReplPair rp5( true, fromjson( "{ok:1,you_are:0}" ) );
rp5.arbitrate();
- ASSERT( rp5.state == ReplPair::State_Slave );
+ ASSERT( rp5.state == ReplPair::State_Slave );
TestableReplPair rp6( true, fromjson( "{ok:1,you_are:-1}" ) );
rp6.arbitrate();
// unchanged from initial value
- ASSERT( rp6.state == ReplPair::State_Negotiating );
+ ASSERT( rp6.state == ReplPair::State_Negotiating );
}
private:
class TestableReplPair : public ReplPair {
public:
TestableReplPair( bool connect, const BSONObj &one ) :
- ReplPair( "a", "z" ),
- connect_( connect ),
- one_( one ) {
+ ReplPair( "a", "z" ),
+ connect_( connect ),
+ one_( one ) {
}
virtual
DBClientConnection *newClientConnection() const {
@@ -326,10 +326,10 @@ namespace PairingTests {
class All : public Suite {
public:
- All() : Suite( "pairing" ){
+ All() : Suite( "pairing" ) {
}
-
- void setupTests(){
+
+ void setupTests() {
add< ReplPairTests::Create >();
add< ReplPairTests::Dominant >();
add< ReplPairTests::SetMaster >();
diff --git a/dbtests/pdfiletests.cpp b/dbtests/pdfiletests.cpp
index 7e92783..2844fc4 100644
--- a/dbtests/pdfiletests.cpp
+++ b/dbtests/pdfiletests.cpp
@@ -31,7 +31,7 @@ namespace PdfileTests {
class Base {
public:
- Base() : _context( ns() ){
+ Base() : _context( ns() ) {
}
virtual ~Base() {
if ( !nsd() )
@@ -71,6 +71,7 @@ namespace PdfileTests {
BSONObj o = b.done();
int len = o.objsize();
Extent *e = ext.ext();
+ e = getDur().writing(e);
int ofs;
if ( e->lastRecord.isNull() )
ofs = ext.getOfs() + ( e->_extentData - (char *)e );
@@ -78,6 +79,7 @@ namespace PdfileTests {
ofs = e->lastRecord.getOfs() + e->lastRecord.rec()->lengthWithHeaders;
DiskLoc dl( ext.a(), ofs );
Record *r = dl.rec();
+ r = (Record*) getDur().writingPtr(r, Record::HeaderSize + len);
r->lengthWithHeaders = Record::HeaderSize + len;
r->extentOfs = e->myLoc.getOfs();
r->nextOfs = DiskLoc::NullOfs;
@@ -86,7 +88,7 @@ namespace PdfileTests {
if ( e->firstRecord.isNull() )
e->firstRecord = dl;
else
- e->lastRecord.rec()->nextOfs = ofs;
+ getDur().writingInt(e->lastRecord.rec()->nextOfs) = ofs;
e->lastRecord = dl;
return dl;
}
@@ -110,7 +112,7 @@ namespace PdfileTests {
class EmptyLooped : public Base {
virtual void prepare() {
- nsd()->capFirstNewRecord = DiskLoc();
+ nsd()->writingWithExtra()->capFirstNewRecord = DiskLoc();
}
virtual int count() const {
return 0;
@@ -119,7 +121,7 @@ namespace PdfileTests {
class EmptyMultiExtentLooped : public Base {
virtual void prepare() {
- nsd()->capFirstNewRecord = DiskLoc();
+ nsd()->writingWithExtra()->capFirstNewRecord = DiskLoc();
}
virtual int count() const {
return 0;
@@ -131,7 +133,7 @@ namespace PdfileTests {
class Single : public Base {
virtual void prepare() {
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 0 );
+ nsd()->writingWithExtra()->capFirstNewRecord = insert( nsd()->capExtent, 0 );
}
virtual int count() const {
return 1;
@@ -140,7 +142,8 @@ namespace PdfileTests {
class NewCapFirst : public Base {
virtual void prepare() {
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 0 );
+ DiskLoc x = insert( nsd()->capExtent, 0 );
+ nsd()->writingWithExtra()->capFirstNewRecord = x;
insert( nsd()->capExtent, 1 );
}
virtual int count() const {
@@ -151,7 +154,7 @@ namespace PdfileTests {
class NewCapLast : public Base {
virtual void prepare() {
insert( nsd()->capExtent, 0 );
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 1 );
+ nsd()->capFirstNewRecord.writing() = insert( nsd()->capExtent, 1 );
}
virtual int count() const {
return 2;
@@ -161,7 +164,7 @@ namespace PdfileTests {
class NewCapMiddle : public Base {
virtual void prepare() {
insert( nsd()->capExtent, 0 );
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 1 );
+ nsd()->capFirstNewRecord.writing() = insert( nsd()->capExtent, 1 );
insert( nsd()->capExtent, 2 );
}
virtual int count() const {
@@ -173,7 +176,7 @@ namespace PdfileTests {
virtual void prepare() {
insert( nsd()->capExtent, 0 );
insert( nsd()->lastExtent, 1 );
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 2 );
+ nsd()->capFirstNewRecord.writing() = insert( nsd()->capExtent, 2 );
insert( nsd()->capExtent, 3 );
}
virtual int count() const {
@@ -186,10 +189,10 @@ namespace PdfileTests {
class LastExtent : public Base {
virtual void prepare() {
- nsd()->capExtent = nsd()->lastExtent;
+ nsd()->capExtent.writing() = nsd()->lastExtent;
insert( nsd()->capExtent, 0 );
insert( nsd()->firstExtent, 1 );
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 2 );
+ nsd()->capFirstNewRecord.writing() = insert( nsd()->capExtent, 2 );
insert( nsd()->capExtent, 3 );
}
virtual int count() const {
@@ -202,11 +205,11 @@ namespace PdfileTests {
class MidExtent : public Base {
virtual void prepare() {
- nsd()->capExtent = nsd()->firstExtent.ext()->xnext;
+ nsd()->capExtent.writing() = nsd()->firstExtent.ext()->xnext;
insert( nsd()->capExtent, 0 );
insert( nsd()->lastExtent, 1 );
insert( nsd()->firstExtent, 2 );
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 3 );
+ nsd()->capFirstNewRecord.writing() = insert( nsd()->capExtent, 3 );
insert( nsd()->capExtent, 4 );
}
virtual int count() const {
@@ -219,10 +222,10 @@ namespace PdfileTests {
class AloneInExtent : public Base {
virtual void prepare() {
- nsd()->capExtent = nsd()->firstExtent.ext()->xnext;
+ nsd()->capExtent.writing() = nsd()->firstExtent.ext()->xnext;
insert( nsd()->lastExtent, 0 );
insert( nsd()->firstExtent, 1 );
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 2 );
+ nsd()->capFirstNewRecord.writing() = insert( nsd()->capExtent, 2 );
}
virtual int count() const {
return 3;
@@ -234,10 +237,10 @@ namespace PdfileTests {
class FirstInExtent : public Base {
virtual void prepare() {
- nsd()->capExtent = nsd()->firstExtent.ext()->xnext;
+ nsd()->capExtent.writing() = nsd()->firstExtent.ext()->xnext;
insert( nsd()->lastExtent, 0 );
insert( nsd()->firstExtent, 1 );
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 2 );
+ nsd()->capFirstNewRecord.writing() = insert( nsd()->capExtent, 2 );
insert( nsd()->capExtent, 3 );
}
virtual int count() const {
@@ -250,11 +253,11 @@ namespace PdfileTests {
class LastInExtent : public Base {
virtual void prepare() {
- nsd()->capExtent = nsd()->firstExtent.ext()->xnext;
+ nsd()->capExtent.writing() = nsd()->firstExtent.ext()->xnext;
insert( nsd()->capExtent, 0 );
insert( nsd()->lastExtent, 1 );
insert( nsd()->firstExtent, 2 );
- nsd()->capFirstNewRecord = insert( nsd()->capExtent, 3 );
+ nsd()->capFirstNewRecord.writing() = insert( nsd()->capExtent, 3 );
}
virtual int count() const {
return 4;
@@ -265,11 +268,11 @@ namespace PdfileTests {
};
} // namespace ScanCapped
-
+
namespace Insert {
class Base {
public:
- Base() : _context( ns() ){
+ Base() : _context( ns() ) {
}
virtual ~Base() {
if ( !nsd() )
@@ -288,7 +291,7 @@ namespace PdfileTests {
dblock lk_;
Client::Context _context;
};
-
+
class UpdateDate : public Base {
public:
void run() {
@@ -301,12 +304,86 @@ namespace PdfileTests {
}
};
} // namespace Insert
-
+
+ class ExtentSizing {
+ public:
+ struct SmallFilesControl {
+ SmallFilesControl() {
+ old = cmdLine.smallfiles;
+ cmdLine.smallfiles = false;
+ }
+ ~SmallFilesControl() {
+ cmdLine.smallfiles = old;
+ }
+ bool old;
+ };
+ void run() {
+ SmallFilesControl c;
+ // test that no matter what we start with, we always get to max extent size
+ for ( int obj=16; obj<BSONObjMaxUserSize; obj += 111 ) {
+ int sz = Extent::initialSize( obj );
+ for ( int i=0; i<100; i++ ) {
+ sz = Extent::followupSize( obj , sz );
+ }
+ ASSERT_EQUALS( Extent::maxSize() , sz );
+ }
+ }
+ };
+
+ class ExtentAllocOrder {
+ public:
+ void run() {
+ string dbname = "unittest_ex";
+
+ string c1 = dbname + ".x1";
+ string c2 = dbname + ".x2";
+
+ {
+ DBDirectClient db;
+ db.dropDatabase( dbname );
+ }
+
+ dblock mylock;
+ Client::Context cx( dbname );
+
+ bool isnew;
+ Database * d = dbHolder.getOrCreate( dbname , dbpath , isnew );
+ assert( d );
+
+ int big = 10 * 1024;
+ //int small = 1024;
+
+ unsigned long long l = 0;
+ int n = 0;
+ while ( 1 ) {
+ n++;
+ if( n == 5 && sizeof(void*)==4 )
+ break;
+ MongoDataFile * f = d->addAFile( big , false );
+ cout << f->length() << ' ' << n << endl;
+ if ( f->length() == l )
+ break;
+ l = f->length();
+ }
+
+ int start = d->numFiles();
+ for ( int i=0; i<start; i++ )
+ d->allocExtent( c1.c_str() , d->getFile( i )->getHeader()->unusedLength , false );
+ ASSERT_EQUALS( start , d->numFiles() );
+
+ {
+ DBDirectClient db;
+ db.dropDatabase( dbname );
+ }
+ }
+ };
+
+
class All : public Suite {
public:
- All() : Suite( "pdfile" ){}
-
- void setupTests(){
+ All() : Suite( "pdfile" ) {}
+
+ void setupTests() {
add< ScanCapped::Empty >();
add< ScanCapped::EmptyLooped >();
add< ScanCapped::EmptyMultiExtentLooped >();
@@ -321,6 +398,8 @@ namespace PdfileTests {
add< ScanCapped::FirstInExtent >();
add< ScanCapped::LastInExtent >();
add< Insert::UpdateDate >();
+ add< ExtentSizing >();
+ add< ExtentAllocOrder >();
}
} myall;
diff --git a/dbtests/perf/btreeperf.cpp b/dbtests/perf/btreeperf.cpp
new file mode 100644
index 0000000..7d68d8f
--- /dev/null
+++ b/dbtests/perf/btreeperf.cpp
@@ -0,0 +1,442 @@
+// btreeperf.cpp
+
+/* Copyright 2010 10gen Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Performance timing and space utilization testing for btree indexes.
+ */
+
+#include <iostream>
+
+#include <boost/random/bernoulli_distribution.hpp>
+#include <boost/random/geometric_distribution.hpp>
+#include <boost/random/mersenne_twister.hpp>
+#include <boost/random/variate_generator.hpp>
+#include <boost/random/uniform_int.hpp>
+
+#include "client/dbclient.h"
+#include "../../util/timer.h"
+
+using namespace std;
+using namespace mongo;
+using namespace boost;
+
+const char *ns = "test.btreeperf";
+const char *db = "test";
+const char *index_collection = "btreeperf.$_id_";
+
+// This random number generator has a much larger period than the default
+// generator and is half as fast as the default. Given that we intend to
+// generate large numbers of documents and will utilize more than one random
+// sample per document, choosing this generator seems like a worthwhile tradeoff.
+mt19937 randomNumberGenerator;
+
+/**
+ * An interface for generating documents to be inserted and document specs for
+ * remove requests.
+ */
+class InsertAndRemoveStrategy {
+public:
+ virtual ~InsertAndRemoveStrategy() {}
+ virtual BSONObj insertObj() = 0;
+ virtual BSONObj removeObj() = 0;
+protected:
+ /**
+ * Helper functions for converting a sample value to a sample object with
+ * specified _id, to be inserted or removed.
+ */
+
+ template< class T >
+ BSONObj insertObjWithVal( const T &val ) {
+ BSONObjBuilder b;
+ b.append( "_id", val );
+ return b.obj();
+ }
+ template< class T >
+ BSONObj removeObjWithVal( const T &val ) {
+ BSONObjBuilder b;
+ b.append( "_id", val );
+ return b.obj();
+ }
+};
+
+/**
+ * Manages a set of elements of type T. Supports inserting unique elements and
+ * sampling a random element without replacement.
+ *
+ * TODO In the contexts where this class is currently used, duplicate keys are
+ * either impossible or highly unlikely. And an occasional duplicate value will
+ * not much affect the procedure by wich a random element is chosen. We could
+ * stop checking for duplicates in push(), eliminate _set from the implementaiton,
+ * and potentially improve performance and memory requirements somewhat.
+ */
+template< class T >
+class SetSampler {
+public:
+ /** @param val Insert this value in the set if not already present. */
+ void push( const T& val ) {
+ if ( _set.insert( val ).second ) {
+ _vector.push_back( val );
+ }
+ }
+ /** @return a random element removed from the set */
+ T pull() {
+ if ( _vector.size() == 0 ) {
+ return T();
+ }
+ uniform_int< size_t > sizeRange( 0, _vector.size() - 1 );
+ variate_generator< mt19937&, uniform_int< size_t > > sizeGenerator( randomNumberGenerator, sizeRange );
+ size_t toRemove = sizeGenerator();
+ T val = _vector[ toRemove ];
+ // Replace the random element with the last element, then remove the
+ // last element.
+ _vector[ toRemove ] = _vector.back();
+ _vector.pop_back();
+ _set.erase( val );
+ return val;
+ }
+private:
+ vector< T > _vector;
+ set< T > _set;
+};
+
+/**
+ * Tracks values that have been specified for insertion by the derived class's
+ * implementation of insertVal() and selects uniformally from among values that
+ * have been inserted but not yet removed for the next value to remove.
+ *
+ * The implementation is probabilistically sound, but may be resource intensive
+ * and slow due to the use of a SetSampler.
+ */
+template< class T >
+class InsertAndUniformRemoveStrategy : public InsertAndRemoveStrategy {
+public:
+ virtual BSONObj insertObj() {
+ T val = insertVal();
+ _sampler.push( val );
+ return insertObjWithVal( val );
+ }
+ virtual BSONObj removeObj() { return removeObjWithVal( _sampler.pull() ); }
+protected:
+ /** @return value to insert. This is the only function a derived class need implement. */
+ virtual T insertVal() = 0;
+private:
+ SetSampler< T > _sampler;
+};
+
+/**
+ * The derived class supplies keys to be inserted and removed. The key removal
+ * strategy is similar to the strategy for selecting a random element described
+ * in the MongoDB cookbook: the first key in the collection greater than or
+ * equal to the supplied removal key is removed. This allows selecting an
+ * exising key for removal without the overhead required by a SetSampler.
+ *
+ * While this ranged selection strategy can work well for selecting a random
+ * element, there are some theoretical and empirically observed shortcomings
+ * when the strategy is applied to removing nodes for btree performance measurement:
+ * 1 The likelihood that a given key is removed is proportional to the difference
+ * in value between it and the previous key. Because key deletion increases
+ * the difference in value between adjacent keys, neighboring keys will be
+ * more likely to be deleted than they would be in a true uniform distribution.
+ * 2 MongoDB 1.6 uses 'unused' nodes in the btree implementation. With a ranged
+ * removal strategy, those nodes must be traversed to find a node available
+ * for removal.
+ * 3 Ranged removal was observed to be biased against the balancing policy of
+ * MongoDB 1.7 in some cases, in terms of storage size. This may be a
+ * consequence of point 1 above.
+ * 4 Ranged removal was observed to be significantly biased against the btree
+ * implementation in MongoDB 1.6 in terms of performance. This is likely a
+ * consequence of point 2 above.
+ * 5 In some cases the biases described above were not evident in tests lasting
+ * several minutes, but were evident in tests lasting several hours.
+ */
+template< class T >
+class InsertAndRangedRemoveStrategy : public InsertAndRemoveStrategy {
+public:
+ virtual BSONObj insertObj() { return insertObjWithVal( insertVal() ); }
+ virtual BSONObj removeObj() { return rangedRemoveObjWithVal( removeVal() ); }
+protected:
+ /** Small likelihood that this removal spec will not match any document */
+ template< class U >
+ BSONObj rangedRemoveObjWithVal( const U &val ) {
+ BSONObjBuilder b1;
+ BSONObjBuilder b2( b1.subobjStart( "_id" ) );
+ b2.append( "$gte", val );
+ b2.done();
+ return b1.obj();
+ }
+ virtual T insertVal() = 0;
+ virtual T removeVal() = 0;
+};
+
+/**
+ * Integer Keys
+ * Uniform Inserts
+ * Uniform Removes
+ */
+class UniformInsertRangedUniformRemoveInteger : public InsertAndRangedRemoveStrategy< long long > {
+public:
+ UniformInsertRangedUniformRemoveInteger() :
+ _uniform_int( 0ULL, ~0ULL ),
+ _nextLongLong( randomNumberGenerator, _uniform_int ) {
+ }
+ /** Small likelihood of duplicates */
+ virtual long long insertVal() { return _nextLongLong(); }
+ virtual long long removeVal() { return _nextLongLong(); }
+private:
+ uniform_int< unsigned long long > _uniform_int;
+ variate_generator< mt19937&, uniform_int< unsigned long long > > _nextLongLong;
+};
+
+class UniformInsertUniformRemoveInteger : public InsertAndUniformRemoveStrategy< long long > {
+public:
+ virtual long long insertVal() { return _gen.insertVal(); }
+private:
+ UniformInsertRangedUniformRemoveInteger _gen;
+};
+
+/**
+ * String Keys
+ * Uniform Inserts
+ * Uniform Removes
+ */
+class UniformInsertRangedUniformRemoveString : public InsertAndRangedRemoveStrategy< string > {
+public:
+ UniformInsertRangedUniformRemoveString() :
+ _geometric_distribution( 0.9 ),
+ _nextLength( randomNumberGenerator, _geometric_distribution ),
+ _uniform_char( 'a', 'z' ),
+ _nextChar( randomNumberGenerator, _uniform_char ) {
+ }
+ /** Small likelihood of duplicates */
+ virtual string insertVal() { return nextString(); }
+ virtual string removeVal() { return nextString(); }
+private:
+ string nextString() {
+ // The longer the minimum string length, the lower the likelihood of duplicates
+ int len = _nextLength() + 5;
+ len = len > 100 ? 100 : len;
+ string ret( len, 'x' );
+ for( int i = 0; i < len; ++i ) {
+ ret[ i ] = _nextChar();
+ }
+ return ret;
+ }
+ geometric_distribution<> _geometric_distribution;
+ variate_generator< mt19937&, geometric_distribution<> > _nextLength;
+ uniform_int< char > _uniform_char;
+ variate_generator< mt19937&, uniform_int< char > > _nextChar;
+};
+
+class UniformInsertUniformRemoveString : public InsertAndUniformRemoveStrategy< string > {
+public:
+ virtual string insertVal() { return _gen.insertVal(); }
+private:
+ UniformInsertRangedUniformRemoveString _gen;
+};
+
+/**
+ * OID Keys
+ * Increasing Inserts
+ * Uniform Removes
+ */
+class IncreasingInsertRangedUniformRemoveOID : public InsertAndRangedRemoveStrategy< OID > {
+public:
+ IncreasingInsertRangedUniformRemoveOID() :
+ _max( -1 ) {
+ }
+ virtual OID insertVal() { return oidFromULL( ++_max ); }
+ virtual OID removeVal() {
+ uniform_int< unsigned long long > distribution( 0, _max > 0 ? _max : 0 );
+ variate_generator< mt19937&, uniform_int< unsigned long long > > generator( randomNumberGenerator, distribution );
+ return oidFromULL( generator() );
+ }
+private:
+ static OID oidFromULL( unsigned long long val ) {
+ val = __builtin_bswap64( val );
+ OID oid;
+ oid.clear();
+ memcpy( (char*)&oid + 4, &val, 8 );
+ return oid;
+ }
+ long long _max;
+};
+
+class IncreasingInsertUniformRemoveOID : public InsertAndUniformRemoveStrategy< OID > {
+public:
+ virtual OID insertVal() { return _gen.insertVal(); }
+private:
+ IncreasingInsertRangedUniformRemoveOID _gen;
+};
+
+/**
+ * Integer Keys
+ * Increasing Inserts
+ * Increasing Removes (on remove, the lowest key is always removed)
+ */
+class IncreasingInsertIncreasingRemoveInteger : public InsertAndRemoveStrategy {
+public:
+ IncreasingInsertIncreasingRemoveInteger() :
+ // Start with a large value so data type will be preserved if we round
+ // trip through json.
+ _min( 1LL << 32 ),
+ _max( 1LL << 32 ) {
+ }
+ virtual BSONObj insertObj() { return insertObjWithVal( ++_max ); }
+ virtual BSONObj removeObj() { return removeObjWithVal( _min < _max ? ++_min : _min ); }
+private:
+ long long _min;
+ long long _max;
+};
+
+/** Generate a random boolean value. */
+class BernoulliGenerator {
+public:
+ /**
+ * @param excessFalsePercent This specifies the desired rate of false values
+ * vs true values. If we want false to be 5% more likely than true, we
+ * specify 5 for this argument.
+ */
+ BernoulliGenerator( int excessFalsePercent ) :
+ _bernoulli_distribution( 1.0 / ( 2.0 + excessFalsePercent / 100.0 ) ),
+ _generator( randomNumberGenerator, _bernoulli_distribution ) {
+ }
+ bool operator()() { return _generator(); }
+private:
+ bernoulli_distribution<> _bernoulli_distribution;
+ variate_generator< mt19937&, bernoulli_distribution<> > _generator;
+};
+
+/** Runs a strategy on a connection, with specified mix of inserts and removes. */
+class InsertAndRemoveRunner {
+public:
+ InsertAndRemoveRunner( DBClientConnection &conn, InsertAndRemoveStrategy &strategy, int excessInsertPercent ) :
+ _conn( conn ),
+ _strategy( strategy ),
+ _nextOpTypeRemove( excessInsertPercent ) {
+ }
+ void writeOne() {
+ if ( _nextOpTypeRemove() ) {
+ _conn.remove( ns, _strategy.removeObj(), true );
+ }
+ else {
+ _conn.insert( ns, _strategy.insertObj() );
+ }
+ }
+private:
+ DBClientConnection &_conn;
+ InsertAndRemoveStrategy &_strategy;
+ BernoulliGenerator _nextOpTypeRemove;
+};
+
+/**
+ * Writes a test script to cout based on a strategy and specified mix of inserts
+ * and removes. The script can be subsequently executed by InsertAndRemoveRunner.
+ * Script generation is intended for strategies that are memory or cpu intensive
+ * and might either divert resources from a mongod instance being analyzed on the
+ * same machine or fail to generate requests as quickly as the mongod might
+ * accept them.
+ * The script contains one line per operation. Each line begins
+ * with a letter indicating the operation type, followed by a space. Next
+ * follows the json representation of a document for the specified operation
+ * type.
+ */
+class InsertAndRemoveScriptGenerator {
+public:
+ InsertAndRemoveScriptGenerator( InsertAndRemoveStrategy &strategy, int excessInsertPercent ) :
+ _strategy( strategy ),
+ _nextOpTypeRemove( excessInsertPercent ) {
+ }
+ void writeOne() {
+ if ( _nextOpTypeRemove() ) {
+ cout << "r " << _strategy.removeObj().jsonString() << endl;
+ }
+ else {
+ cout << "i " << _strategy.insertObj().jsonString() << endl;
+ }
+ }
+private:
+ InsertAndRemoveStrategy &_strategy;
+ BernoulliGenerator _nextOpTypeRemove;
+};
+
+/**
+ * Run a test script from cin that was generated by
+ * InsertAndRemoveScriptGenerator. Running the script is intended to be
+ * lightweight in terms of memory and cpu usage, and fast.
+ */
+class InsertAndRemoveScriptRunner {
+public:
+ InsertAndRemoveScriptRunner( DBClientConnection &conn ) :
+ _conn( conn ) {
+ }
+ void writeOne() {
+ cin.getline( _buf, 1024 );
+ BSONObj val = fromjson( _buf + 2 );
+ if ( _buf[ 0 ] == 'r' ) {
+ _conn.remove( ns, val, true );
+ }
+ else {
+ _conn.insert( ns, val );
+ }
+ }
+private:
+ DBClientConnection &_conn;
+ char _buf[ 1024 ];
+};
+
+int main( int argc, const char **argv ) {
+
+ DBClientConnection conn;
+ conn.connect( "127.0.0.1:27017" );
+ conn.dropCollection( ns );
+
+// UniformInsertRangedUniformRemoveInteger strategy;
+// UniformInsertUniformRemoveInteger strategy;
+// UniformInsertRangedUniformRemoveString strategy;
+// UniformInsertUniformRemoveString strategy;
+// IncreasingInsertRangedUniformRemoveOID strategy;
+// IncreasingInsertUniformRemoveOID strategy;
+// IncreasingInsertIncreasingRemoveInteger strategy;
+// InsertAndRemoveScriptGenerator runner( strategy, 5 );
+ InsertAndRemoveScriptRunner runner( conn );
+
+ Timer t;
+ BSONObj statsCmd = BSON( "collstats" << index_collection );
+
+ // Print header, unless we are generating a script (in that case, comment this out).
+ cout << "ops,milliseconds,docs,totalBucketSize" << endl;
+
+ long long i = 0;
+ long long n = 10000000000;
+ while( i < n ) {
+ runner.writeOne();
+ // Print statistics, unless we are generating a script (in that case, comment this out).
+ // The stats collection requests below provide regular read operations,
+ // ensuring we are caught up with the progress being made by the mongod
+ // under analysis.
+ if ( ++i % 50000 == 0 ) {
+ // The total number of documents present.
+ long long docs = conn.count( ns );
+ BSONObj result;
+ conn.runCommand( db, statsCmd, result );
+ // The total number of bytes used for all allocated 8K buckets of the
+ // btree.
+ long long totalBucketSize = result.getField( "count" ).numberLong() * 8192;
+ cout << i << ',' << t.millis() << ',' << docs << ',' << totalBucketSize << endl;
+ }
+ }
+}
diff --git a/dbtests/perf/perftest.cpp b/dbtests/perf/perftest.cpp
index f86a1c3..ef03551 100644
--- a/dbtests/perf/perftest.cpp
+++ b/dbtests/perf/perftest.cpp
@@ -74,14 +74,14 @@ public:
<< "}" << endl;
}
~Runner() {
- theFileAllocator().waitUntilFinished();
+ FileAllocator::get()->waitUntilFinished();
client_->dropDatabase( testDb< T >().c_str() );
}
};
class RunnerSuite : public Suite {
public:
- RunnerSuite( string name ) : Suite( name ){}
+ RunnerSuite( string name ) : Suite( name ) {}
protected:
template< class T >
void add() {
@@ -168,9 +168,9 @@ namespace Insert {
class All : public RunnerSuite {
public:
- All() : RunnerSuite( "insert" ){}
+ All() : RunnerSuite( "insert" ) {}
- void setupTests(){
+ void setupTests() {
add< IdIndex >();
add< TwoIndex >();
add< TenIndex >();
@@ -252,8 +252,8 @@ namespace Update {
class All : public RunnerSuite {
public:
- All() : RunnerSuite( "update" ){}
- void setupTests(){
+ All() : RunnerSuite( "update" ) {}
+ void setupTests() {
add< Smaller >();
add< Bigger >();
add< Inc >();
@@ -266,33 +266,33 @@ namespace Update {
namespace BSON {
const char *sample =
- "{\"one\":2, \"two\":5, \"three\": {},"
- "\"four\": { \"five\": { \"six\" : 11 } },"
- "\"seven\": [ \"a\", \"bb\", \"ccc\", 5 ],"
- "\"eight\": Dbref( \"rrr\", \"01234567890123456789aaaa\" ),"
- "\"_id\": ObjectId( \"deadbeefdeadbeefdeadbeef\" ),"
- "\"nine\": { \"$binary\": \"abc=\", \"$type\": \"02\" },"
- "\"ten\": Date( 44 ), \"eleven\": /foooooo/i }";
+ "{\"one\":2, \"two\":5, \"three\": {},"
+ "\"four\": { \"five\": { \"six\" : 11 } },"
+ "\"seven\": [ \"a\", \"bb\", \"ccc\", 5 ],"
+ "\"eight\": Dbref( \"rrr\", \"01234567890123456789aaaa\" ),"
+ "\"_id\": ObjectId( \"deadbeefdeadbeefdeadbeef\" ),"
+ "\"nine\": { \"$binary\": \"abc=\", \"$type\": \"02\" },"
+ "\"ten\": Date( 44 ), \"eleven\": /foooooo/i }";
const char *shopwikiSample =
- "{ '_id' : '289780-80f85380b5c1d4a0ad75d1217673a4a2' , 'site_id' : 289780 , 'title'"
- ": 'Jubilee - Margaret Walker' , 'image_url' : 'http://www.heartlanddigsandfinds.c"
- "om/store/graphics/Product_Graphics/Product_8679.jpg' , 'url' : 'http://www.heartla"
- "nddigsandfinds.com/store/store_product_detail.cfm?Product_ID=8679&Category_ID=2&Su"
- "b_Category_ID=910' , 'url_hash' : 3450626119933116345 , 'last_update' : null , '"
- "features' : { '$imagePrefetchDate' : '2008Aug30 22:39' , '$image.color.rgb' : '5a7"
- "574' , 'Price' : '$10.99' , 'Description' : 'Author--s 1st Novel. A Houghton Miffl"
- "in Literary Fellowship Award novel by the esteemed poet and novelist who has demon"
- "strated a lifelong commitment to the heritage of black culture. An acclaimed story"
- "of Vyry, a negro slave during the 19th Century, facing the biggest challenge of h"
- "er lifetime - that of gaining her freedom, fighting for all the things she had nev"
- "er known before. The author, great-granddaughter of Vyry, reveals what the Civil W"
- "ar in America meant to the Negroes. Slavery W' , '$priceHistory-1' : '2008Dec03 $1"
- "0.99' , 'Brand' : 'Walker' , '$brands_in_title' : 'Walker' , '--path' : '//HTML[1]"
- "/BODY[1]/TABLE[1]/TR[1]/TD[1]/P[1]/TABLE[1]/TR[1]/TD[1]/TABLE[1]/TR[2]/TD[2]/TABLE"
- "[1]/TR[1]/TD[1]/P[1]/TABLE[1]/TR[1]' , '~location' : 'en_US' , '$crawled' : '2009J"
- "an11 03:22' , '$priceHistory-2' : '2008Nov15 $10.99' , '$priceHistory-0' : '2008De"
- "c24 $10.99'}}";
+ "{ '_id' : '289780-80f85380b5c1d4a0ad75d1217673a4a2' , 'site_id' : 289780 , 'title'"
+ ": 'Jubilee - Margaret Walker' , 'image_url' : 'http://www.heartlanddigsandfinds.c"
+ "om/store/graphics/Product_Graphics/Product_8679.jpg' , 'url' : 'http://www.heartla"
+ "nddigsandfinds.com/store/store_product_detail.cfm?Product_ID=8679&Category_ID=2&Su"
+ "b_Category_ID=910' , 'url_hash' : 3450626119933116345 , 'last_update' : null , '"
+ "features' : { '$imagePrefetchDate' : '2008Aug30 22:39' , '$image.color.rgb' : '5a7"
+ "574' , 'Price' : '$10.99' , 'Description' : 'Author--s 1st Novel. A Houghton Miffl"
+ "in Literary Fellowship Award novel by the esteemed poet and novelist who has demon"
+ "strated a lifelong commitment to the heritage of black culture. An acclaimed story"
+ "of Vyry, a negro slave during the 19th Century, facing the biggest challenge of h"
+ "er lifetime - that of gaining her freedom, fighting for all the things she had nev"
+ "er known before. The author, great-granddaughter of Vyry, reveals what the Civil W"
+ "ar in America meant to the Negroes. Slavery W' , '$priceHistory-1' : '2008Dec03 $1"
+ "0.99' , 'Brand' : 'Walker' , '$brands_in_title' : 'Walker' , '--path' : '//HTML[1]"
+ "/BODY[1]/TABLE[1]/TR[1]/TD[1]/P[1]/TABLE[1]/TR[1]/TD[1]/TABLE[1]/TR[2]/TD[2]/TABLE"
+ "[1]/TR[1]/TD[1]/P[1]/TABLE[1]/TR[1]' , '~location' : 'en_US' , '$crawled' : '2009J"
+ "an11 03:22' , '$priceHistory-2' : '2008Nov15 $10.99' , '$priceHistory-0' : '2008De"
+ "c24 $10.99'}}";
class Parse {
public:
@@ -332,8 +332,8 @@ namespace BSON {
class All : public RunnerSuite {
public:
- All() : RunnerSuite( "bson" ){}
- void setupTests(){
+ All() : RunnerSuite( "bson" ) {}
+ void setupTests() {
add< Parse >();
add< ShopwikiParse >();
add< Json >();
@@ -402,8 +402,8 @@ namespace Index {
class All : public RunnerSuite {
public:
- All() : RunnerSuite( "index" ){}
- void setupTests(){
+ All() : RunnerSuite( "index" ) {}
+ void setupTests() {
add< Int >();
add< ObjectId >();
add< String >();
@@ -435,7 +435,7 @@ namespace QueryTests {
}
void run() {
client_->findOne( ns_.c_str(),
- QUERY( "a" << "b" ).hint( BSON( "_id" << 1 ) ) );
+ QUERY( "a" << "b" ).hint( BSON( "_id" << 1 ) ) );
}
string ns_;
};
@@ -465,7 +465,7 @@ namespace QueryTests {
}
void run() {
auto_ptr< DBClientCursor > c =
- client_->query( ns_.c_str(), Query( BSONObj() ).sort( BSON( "_id" << 1 ) ) );
+ client_->query( ns_.c_str(), Query( BSONObj() ).sort( BSON( "_id" << 1 ) ) );
int i = 0;
for( ; c->more(); c->nextSafe(), ++i );
ASSERT_EQUALS( 50000, i );
@@ -481,7 +481,7 @@ namespace QueryTests {
}
void run() {
auto_ptr< DBClientCursor > c =
- client_->query( ns_.c_str(), Query( BSONObj() ).sort( BSON( "_id" << 1 ) ) );
+ client_->query( ns_.c_str(), Query( BSONObj() ).sort( BSON( "_id" << 1 ) ) );
int i = 0;
for( ; c->more(); c->nextSafe(), ++i );
ASSERT_EQUALS( 50000, i );
@@ -541,8 +541,8 @@ namespace QueryTests {
class All : public RunnerSuite {
public:
- All() : RunnerSuite( "query" ){}
- void setupTests(){
+ All() : RunnerSuite( "query" ) {}
+ void setupTests() {
add< NoMatch >();
add< NoMatchIndex >();
add< NoMatchLong >();
@@ -602,8 +602,8 @@ namespace Count {
class All : public RunnerSuite {
public:
- All() : RunnerSuite( "count" ){}
- void setupTests(){
+ All() : RunnerSuite( "count" ) {}
+ void setupTests() {
add< Count >();
add< CountIndex >();
add< CountSimpleIndex >();
@@ -677,8 +677,8 @@ namespace Plan {
class All : public RunnerSuite {
public:
- All() : RunnerSuite("plan" ){}
- void setupTests(){
+ All() : RunnerSuite("plan" ) {}
+ void setupTests() {
add< Hint >();
add< Sort >();
add< Query >();
diff --git a/dbtests/perftests.cpp b/dbtests/perftests.cpp
new file mode 100644
index 0000000..182595c
--- /dev/null
+++ b/dbtests/perftests.cpp
@@ -0,0 +1,336 @@
+/** @file perftests.cpp.cpp : unit tests relating to performance
+
+ The idea herein is tests that run fast and can be part of the normal CI suite. So no tests herein that take
+ a long time to run. Obviously we need those too, but they will be separate.
+
+ These tests use DBDirectClient; they are a bit white-boxish.
+*/
+
+/**
+ * Copyright (C) 2008 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "../db/query.h"
+#include "../db/db.h"
+#include "../db/instance.h"
+#include "../db/json.h"
+#include "../db/lasterror.h"
+#include "../db/update.h"
+#include "../db/taskqueue.h"
+#include "../util/timer.h"
+#include "dbtests.h"
+#include "../db/dur_stats.h"
+
+namespace PerfTests {
+ typedef DBDirectClient DBClientType;
+ //typedef DBClientConnection DBClientType;
+
+ class ClientBase {
+ public:
+ // NOTE: Not bothering to backup the old error record.
+ ClientBase() {
+ //_client.connect("localhost");
+ mongo::lastError.reset( new LastError() );
+ }
+ virtual ~ClientBase() {
+ //mongo::lastError.release();
+ }
+ protected:
+ static void insert( const char *ns, BSONObj o ) {
+ _client.insert( ns, o );
+ }
+ static void update( const char *ns, BSONObj q, BSONObj o, bool upsert = 0 ) {
+ _client.update( ns, Query( q ), o, upsert );
+ }
+ static bool error() {
+ return !_client.getPrevError().getField( "err" ).isNull();
+ }
+ DBClientBase &client() const { return _client; }
+ private:
+ static DBClientType _client;
+ };
+ DBClientType ClientBase::_client;
+
+ // todo: use a couple threads. not a very good test yet.
+ class TaskQueueTest {
+ static int tot;
+ struct V {
+ int val;
+ static void go(const V &v) { tot += v.val; }
+ };
+ public:
+ void run() {
+ tot = 0;
+ TaskQueue<V> d;
+ int x = 0;
+ for( int i = 0; i < 100; i++ ) {
+ if( i % 30 == 0 )
+ d.invoke();
+
+ x += i;
+ writelock lk;
+ V v;
+ v.val = i;
+ d.defer(v);
+ }
+ d.invoke();
+ assert( x == tot );
+ }
+ };
+ int TaskQueueTest::tot;
+
+ class CappedTest : public ClientBase {
+ };
+
+ class B : public ClientBase {
+ string _ns;
+ protected:
+ const char *ns() { return _ns.c_str(); }
+ virtual void prep() = 0;
+
+ virtual void timed() = 0;
+
+ // optional 2nd test phase to be timed separately
+ // return name of it
+ virtual const char * timed2() { return 0; }
+
+ virtual void post() { }
+ virtual string name() = 0;
+ virtual unsigned long long expectation() = 0;
+ virtual int howLongMillis() { return 5000; }
+ public:
+ void say(unsigned long long n, int ms, string s) {
+ cout << setw(36) << left << s << ' ' << right << setw(7) << n*1000/ms << "/sec " << setw(4) << ms << "ms" << endl;
+ cout << dur::stats.curr->_asObj().toString() << endl;
+ }
+ void run() {
+ _ns = string("perftest.") + name();
+ client().dropCollection(ns());
+
+ prep();
+
+ int hlm = howLongMillis();
+
+ dur::stats._intervalMicros = 0; // no auto rotate
+ dur::stats.curr->reset();
+ Timer t;
+ unsigned long long n = 0;
+ const unsigned Batch = 50;
+ do {
+ unsigned i;
+ for( i = 0; i < Batch; i++ )
+ timed();
+ n += i;
+ }
+ while( t.millis() < hlm );
+ client().getLastError(); // block until all ops are finished
+ int ms = t.millis();
+ say(n, ms, name());
+
+ if( n < expectation() ) {
+ cout << "\ntest " << name() << " seems slow n:" << n << " ops/sec but expect greater than:" << expectation() << endl;
+ cout << endl;
+ }
+
+ {
+ const char *test2name = timed2();
+ if( test2name ) {
+ dur::stats.curr->reset();
+ Timer t;
+ unsigned long long n = 0;
+ while( 1 ) {
+ unsigned i;
+ for( i = 0; i < Batch; i++ )
+ timed2();
+ n += i;
+ if( t.millis() > hlm )
+ break;
+ }
+ int ms = t.millis();
+ say(n, ms, test2name);
+ }
+ }
+ }
+ };
+
+ class InsertDup : public B {
+ const BSONObj o;
+ public:
+ InsertDup() : o( BSON("_id" << 1) ) { } // dup keys
+ string name() {
+ return "insert duplicate _ids";
+ }
+ void prep() {
+ client().insert( ns(), o );
+ }
+ void timed() {
+ client().insert( ns(), o );
+ }
+ void post() {
+ assert( client().count(ns()) == 1 );
+ }
+ unsigned long long expectation() { return 1000; }
+ };
+
+ class Insert1 : public InsertDup {
+ const BSONObj x;
+ public:
+ Insert1() : x( BSON("x" << 99) ) { }
+ string name() { return "insert simple"; }
+ void timed() {
+ client().insert( ns(), x );
+ }
+ void post() {
+ assert( client().count(ns()) > 100 );
+ }
+ unsigned long long expectation() { return 1000; }
+ };
+
+ class InsertBig : public InsertDup {
+ BSONObj x;
+ virtual int howLongMillis() {
+ if( sizeof(void*) == 4 )
+ return 1000; // could exceed mmapping if run too long, as this function adds a lot fasta
+ return 5000;
+ }
+ public:
+ InsertBig() {
+ char buf[200000];
+ BSONObjBuilder b;
+ b.append("x", 99);
+ b.appendBinData("bin", 200000, (BinDataType) 129, buf);
+ x = b.obj();
+ }
+ string name() { return "insert big"; }
+ void timed() {
+ client().insert( ns(), x );
+ }
+ unsigned long long expectation() { return 20; }
+ };
+
+ class InsertRandom : public B {
+ public:
+ string name() { return "random inserts"; }
+ void prep() {
+ client().insert( ns(), BSONObj() );
+ client().ensureIndex(ns(), BSON("x"<<1));
+ }
+ void timed() {
+ int x = rand();
+ BSONObj y = BSON("x" << x << "y" << rand() << "z" << 33);
+ client().insert(ns(), y);
+ }
+ void post() {
+ }
+ unsigned long long expectation() { return 1000; }
+ };
+
+ /** upserts about 32k records and then keeps updating them
+ 2 indexes
+ */
+ class Update1 : public B {
+ public:
+ static int rand() {
+ return std::rand() & 0x7fff;
+ }
+ string name() { return "random upserts"; }
+ void prep() {
+ client().insert( ns(), BSONObj() );
+ client().ensureIndex(ns(), BSON("x"<<1));
+ }
+ void timed() {
+ int x = rand();
+ BSONObj q = BSON("x" << x);
+ BSONObj y = BSON("x" << x << "y" << rand() << "z" << 33);
+ client().update(ns(), q, y, /*upsert*/true);
+ }
+
+ const char * timed2() {
+ static BSONObj I = BSON( "$inc" << BSON( "y" << 1 ) );
+
+ // test some $inc's
+
+ int x = rand();
+ BSONObj q = BSON("x" << x);
+ client().update(ns(), q, I);
+
+ return "inc";
+ }
+
+ void post() {
+ }
+ unsigned long long expectation() { return 1000; }
+ };
+
+ template <typename T>
+ class MoreIndexes : public T {
+ public:
+ string name() { return T::name() + " with more indexes"; }
+ void prep() {
+ T::prep();
+ this->client().ensureIndex(this->ns(), BSON("y"<<1));
+ this->client().ensureIndex(this->ns(), BSON("z"<<1));
+ }
+ };
+
+ void t() {
+ for( int i = 0; i < 20; i++ ) {
+ sleepmillis(21);
+ string fn = "/tmp/t1";
+ MongoMMF f;
+ unsigned long long len = 1 * 1024 * 1024;
+ assert( f.create(fn, len, /*sequential*/rand()%2==0) );
+ {
+ char *p = (char *) f.getView();
+ assert(p);
+ // write something to the private view as a test
+ strcpy(p, "hello");
+ }
+ if( cmdLine.dur ) {
+ char *w = (char *) f.view_write();
+ strcpy(w + 6, "world");
+ }
+ MongoFileFinder ff;
+ ASSERT( ff.findByPath(fn) );
+ }
+ }
+
+ class All : public Suite {
+ public:
+ All() : Suite( "perf" )
+ {
+ }
+ ~All() {
+ }
+ Result * run( const string& filter ) {
+ boost::thread a(t);
+ Result * res = Suite::run(filter);
+ a.join();
+ return res;
+ }
+
+ void setupTests() {
+ add< TaskQueueTest >();
+ add< InsertDup >();
+ add< Insert1 >();
+ add< InsertRandom >();
+ add< MoreIndexes<InsertRandom> >();
+ add< Update1 >();
+ add< MoreIndexes<Update1> >();
+ add< InsertBig >();
+ }
+ } myall;
+}
diff --git a/dbtests/queryoptimizertests.cpp b/dbtests/queryoptimizertests.cpp
index f5d1155..acf9217 100644
--- a/dbtests/queryoptimizertests.cpp
+++ b/dbtests/queryoptimizertests.cpp
@@ -27,12 +27,12 @@
namespace mongo {
extern BSONObj id_obj;
- void runQuery(Message& m, QueryMessage& q, Message &response ){
+ void runQuery(Message& m, QueryMessage& q, Message &response ) {
CurOp op( &(cc()) );
op.ensureStarted();
runQuery( m , q , op, response );
}
- void runQuery(Message& m, QueryMessage& q ){
+ void runQuery(Message& m, QueryMessage& q ) {
Message response;
runQuery( m, q, response );
}
@@ -64,14 +64,14 @@ namespace QueryOptimizerTests {
}
}
};
-
+
class NumericBase : public Base {
public:
- NumericBase(){
+ NumericBase() {
o = BSON( "min" << -numeric_limits<double>::max() << "max" << numeric_limits<double>::max() );
}
-
+
virtual BSONElement lower() { return o["min"]; }
virtual BSONElement upper() { return o["max"]; }
private:
@@ -81,7 +81,7 @@ namespace QueryOptimizerTests {
class Empty : public Base {
virtual BSONObj query() { return BSONObj(); }
};
-
+
class Eq : public Base {
public:
Eq() : o_( BSON( "a" << 1 ) ) {}
@@ -94,7 +94,7 @@ namespace QueryOptimizerTests {
class DupEq : public Eq {
public:
virtual BSONObj query() { return BSON( "a" << 1 << "b" << 2 << "a" << 1 ); }
- };
+ };
class Lt : public NumericBase {
public:
@@ -103,13 +103,13 @@ namespace QueryOptimizerTests {
virtual BSONElement upper() { return o_.firstElement(); }
virtual bool upperInclusive() { return false; }
BSONObj o_;
- };
+ };
class Lte : public Lt {
- virtual BSONObj query() { return BSON( "a" << LTE << 1 ); }
+ virtual BSONObj query() { return BSON( "a" << LTE << 1 ); }
virtual bool upperInclusive() { return true; }
};
-
+
class Gt : public NumericBase {
public:
Gt() : o_( BSON( "-" << 1 ) ) {}
@@ -117,23 +117,23 @@ namespace QueryOptimizerTests {
virtual BSONElement lower() { return o_.firstElement(); }
virtual bool lowerInclusive() { return false; }
BSONObj o_;
- };
-
+ };
+
class Gte : public Gt {
- virtual BSONObj query() { return BSON( "a" << GTE << 1 ); }
+ virtual BSONObj query() { return BSON( "a" << GTE << 1 ); }
virtual bool lowerInclusive() { return true; }
};
-
+
class TwoLt : public Lt {
- virtual BSONObj query() { return BSON( "a" << LT << 1 << LT << 5 ); }
+ virtual BSONObj query() { return BSON( "a" << LT << 1 << LT << 5 ); }
};
class TwoGt : public Gt {
- virtual BSONObj query() { return BSON( "a" << GT << 0 << GT << 1 ); }
- };
+ virtual BSONObj query() { return BSON( "a" << GT << 0 << GT << 1 ); }
+ };
class EqGte : public Eq {
- virtual BSONObj query() { return BSON( "a" << 1 << "a" << GTE << 1 ); }
+ virtual BSONObj query() { return BSON( "a" << 1 << "a" << GTE << 1 ); }
};
class EqGteInvalid {
@@ -142,7 +142,7 @@ namespace QueryOptimizerTests {
FieldRangeSet fbs( "ns", BSON( "a" << 1 << "a" << GTE << 2 ) );
ASSERT( !fbs.matchPossible() );
}
- };
+ };
struct RegexBase : Base {
void run() { //need to only look at first interval
@@ -166,7 +166,7 @@ namespace QueryOptimizerTests {
virtual BSONElement upper() { return o2_.firstElement(); }
virtual bool upperInclusive() { return false; }
BSONObj o1_, o2_;
- };
+ };
class RegexObj : public RegexBase {
public:
@@ -177,7 +177,7 @@ namespace QueryOptimizerTests {
virtual bool upperInclusive() { return false; }
BSONObj o1_, o2_;
};
-
+
class UnhelpfulRegex : public RegexBase {
public:
UnhelpfulRegex() {
@@ -191,13 +191,13 @@ namespace QueryOptimizerTests {
BSONObjBuilder b;
b.appendRegex( "a", "abc" );
return b.obj();
- }
+ }
virtual BSONElement lower() { return limits["lower"]; }
virtual BSONElement upper() { return limits["upper"]; }
virtual bool upperInclusive() { return false; }
BSONObj limits;
};
-
+
class In : public Base {
public:
In() : o1_( BSON( "-" << -3 ) ), o2_( BSON( "-" << 44 ) ) {}
@@ -219,7 +219,7 @@ namespace QueryOptimizerTests {
virtual BSONElement upper() { return o2_.firstElement(); }
BSONObj o1_, o2_;
};
-
+
class Equality {
public:
void run() {
@@ -237,7 +237,7 @@ namespace QueryOptimizerTests {
ASSERT( !s6.range( "a" ).equality() );
}
};
-
+
class SimplifiedQuery {
public:
void run() {
@@ -251,7 +251,7 @@ namespace QueryOptimizerTests {
ASSERT( !simple.getObjectField( "e" ).woCompare( fromjson( "{$gte:0,$lte:10}" ) ) );
}
};
-
+
class QueryPatternTest {
public:
void run() {
@@ -277,14 +277,14 @@ namespace QueryOptimizerTests {
return FieldRangeSet( "", query ).pattern( sort );
}
};
-
+
class NoWhere {
public:
void run() {
ASSERT_EQUALS( 0, FieldRangeSet( "ns", BSON( "$where" << 1 ) ).nNontrivialRanges() );
}
};
-
+
class Numeric {
public:
void run() {
@@ -311,29 +311,39 @@ namespace QueryOptimizerTests {
ASSERT( f.range( "a" ).max().woCompare( BSON( "a" << 3.0 ).firstElement(), false ) == 0 );
}
};
-
- class MultiBound {
- public:
- void run() {
+
+ class UnionBound {
+ public:
+ void run() {
+ FieldRangeSet frs( "", fromjson( "{a:{$gt:1,$lt:9},b:{$gt:9,$lt:12}}" ) );
+ FieldRange ret = frs.range( "a" );
+ ret |= frs.range( "b" );
+ ASSERT_EQUALS( 2U, ret.intervals().size() );
+ }
+ };
+
+ class MultiBound {
+ public:
+ void run() {
FieldRangeSet frs1( "", fromjson( "{a:{$in:[1,3,5,7,9]}}" ) );
FieldRangeSet frs2( "", fromjson( "{a:{$in:[2,3,5,8,9]}}" ) );
- FieldRange fr1 = frs1.range( "a" );
- FieldRange fr2 = frs2.range( "a" );
- fr1 &= fr2;
+ FieldRange fr1 = frs1.range( "a" );
+ FieldRange fr2 = frs2.range( "a" );
+ fr1 &= fr2;
ASSERT( fr1.min().woCompare( BSON( "a" << 3.0 ).firstElement(), false ) == 0 );
ASSERT( fr1.max().woCompare( BSON( "a" << 9.0 ).firstElement(), false ) == 0 );
- vector< FieldInterval > intervals = fr1.intervals();
- vector< FieldInterval >::const_iterator j = intervals.begin();
- double expected[] = { 3, 5, 9 };
- for( int i = 0; i < 3; ++i, ++j ) {
- ASSERT_EQUALS( expected[ i ], j->_lower._bound.number() );
- ASSERT( j->_lower._inclusive );
- ASSERT( j->_lower == j->_upper );
- }
- ASSERT( j == intervals.end() );
- }
- };
-
+ vector< FieldInterval > intervals = fr1.intervals();
+ vector< FieldInterval >::const_iterator j = intervals.begin();
+ double expected[] = { 3, 5, 9 };
+ for( int i = 0; i < 3; ++i, ++j ) {
+ ASSERT_EQUALS( expected[ i ], j->_lower._bound.number() );
+ ASSERT( j->_lower._inclusive );
+ ASSERT( j->_lower == j->_upper );
+ }
+ ASSERT( j == intervals.end() );
+ }
+ };
+
class DiffBase {
public:
virtual ~DiffBase() {}
@@ -341,7 +351,7 @@ namespace QueryOptimizerTests {
FieldRangeSet frs( "", fromjson( obj().toString() ) );
FieldRange ret = frs.range( "a" );
ret -= frs.range( "b" );
- check( ret );
+ check( ret );
}
protected:
void check( const FieldRange &fr ) {
@@ -366,7 +376,7 @@ namespace QueryOptimizerTests {
class TwoRangeBase : public DiffBase {
public:
TwoRangeBase( string obj, int low, int high, bool lowI, bool highI )
- : _obj( obj ) {
+ : _obj( obj ) {
_n[ 0 ] = low;
_n[ 1 ] = high;
_b[ 0 ] = lowI;
@@ -381,7 +391,7 @@ namespace QueryOptimizerTests {
int _n[ 2 ];
bool _b[ 2 ];
};
-
+
struct Diff1 : public TwoRangeBase {
Diff1() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gt:3,$lt:4}}", 1, 2, false, false ) {}
};
@@ -389,7 +399,7 @@ namespace QueryOptimizerTests {
struct Diff2 : public TwoRangeBase {
Diff2() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gt:2,$lt:4}}", 1, 2, false, false ) {}
};
-
+
struct Diff3 : public TwoRangeBase {
Diff3() : TwoRangeBase( "{a:{$gt:1,$lte:2},b:{$gt:2,$lt:4}}", 1, 2, false, true ) {}
};
@@ -397,11 +407,11 @@ namespace QueryOptimizerTests {
struct Diff4 : public TwoRangeBase {
Diff4() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
};
-
+
struct Diff5 : public TwoRangeBase {
Diff5() : TwoRangeBase( "{a:{$gt:1,$lte:2},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
};
-
+
struct Diff6 : public TwoRangeBase {
Diff6() : TwoRangeBase( "{a:{$gt:1,$lte:3},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
};
@@ -409,7 +419,7 @@ namespace QueryOptimizerTests {
struct Diff7 : public TwoRangeBase {
Diff7() : TwoRangeBase( "{a:{$gt:1,$lte:3},b:{$gt:2,$lt:4}}", 1, 2, false, true) {}
};
-
+
struct Diff8 : public TwoRangeBase {
Diff8() : TwoRangeBase( "{a:{$gt:1,$lt:4},b:{$gt:2,$lt:4}}", 1, 2, false, true) {}
};
@@ -420,22 +430,45 @@ namespace QueryOptimizerTests {
struct Diff10 : public TwoRangeBase {
Diff10() : TwoRangeBase( "{a:{$gt:1,$lte:4},b:{$gt:2,$lte:4}}", 1, 2, false, true) {}
- };
-
- struct Diff11 : public TwoRangeBase {
- Diff11() : TwoRangeBase( "{a:{$gt:1,$lte:4},b:{$gt:2,$lt:4}}", 1, 4, false, true) {}
};
- struct Diff12 : public TwoRangeBase {
- Diff12() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:2,$lt:4}}", 1, 5, false, false) {}
+ class SplitRangeBase : public DiffBase {
+ public:
+ SplitRangeBase( string obj, int low1, bool low1I, int high1, bool high1I, int low2, bool low2I, int high2, bool high2I )
+ : _obj( obj ) {
+ _n[ 0 ] = low1;
+ _n[ 1 ] = high1;
+ _n[ 2 ] = low2;
+ _n[ 3 ] = high2;
+ _b[ 0 ] = low1I;
+ _b[ 1 ] = high1I;
+ _b[ 2 ] = low2I;
+ _b[ 3 ] = high2I;
+ }
+ private:
+ virtual unsigned len() const { return 2; }
+ virtual const int *nums() const { return _n; }
+ virtual const bool *incs() const { return _b; }
+ virtual BSONObj obj() const { return fromjson( _obj ); }
+ string _obj;
+ int _n[ 4 ];
+ bool _b[ 4 ];
+ };
+
+ struct Diff11 : public SplitRangeBase {
+ Diff11() : SplitRangeBase( "{a:{$gt:1,$lte:4},b:{$gt:2,$lt:4}}", 1, false, 2, true, 4, true, 4, true) {}
+ };
+
+ struct Diff12 : public SplitRangeBase {
+ Diff12() : SplitRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:2,$lt:4}}", 1, false, 2, true, 4, true, 5, false) {}
};
-
+
struct Diff13 : public TwoRangeBase {
Diff13() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:1,$lt:4}}", 4, 5, true, false) {}
};
-
- struct Diff14 : public TwoRangeBase {
- Diff14() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:{$gt:1,$lt:4}}", 1, 5, true, false) {}
+
+ struct Diff14 : public SplitRangeBase {
+ Diff14() : SplitRangeBase( "{a:{$gte:1,$lt:5},b:{$gt:1,$lt:4}}", 1, true, 1, true, 4, true, 5, false) {}
};
struct Diff15 : public TwoRangeBase {
@@ -481,7 +514,7 @@ namespace QueryOptimizerTests {
struct Diff25 : public TwoRangeBase {
Diff25() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:0}", 1, 5, true, true) {}
};
-
+
struct Diff26 : public TwoRangeBase {
Diff26() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:1}", 1, 5, false, true) {}
};
@@ -490,14 +523,14 @@ namespace QueryOptimizerTests {
Diff27() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:1}", 1, 5, false, true) {}
};
- struct Diff28 : public TwoRangeBase {
- Diff28() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:3}", 1, 5, true, true) {}
+ struct Diff28 : public SplitRangeBase {
+ Diff28() : SplitRangeBase( "{a:{$gte:1,$lte:5},b:3}", 1, true, 3, false, 3, false, 5, true) {}
};
struct Diff29 : public TwoRangeBase {
Diff29() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:5}", 1, 5, true, false) {}
};
-
+
struct Diff30 : public TwoRangeBase {
Diff30() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:5}", 1, 5, true, false) {}
};
@@ -505,7 +538,7 @@ namespace QueryOptimizerTests {
struct Diff31 : public TwoRangeBase {
Diff31() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:6}", 1, 5, true, false) {}
};
-
+
struct Diff32 : public TwoRangeBase {
Diff32() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:6}", 1, 5, true, true) {}
};
@@ -513,7 +546,7 @@ namespace QueryOptimizerTests {
class EmptyBase : public DiffBase {
public:
EmptyBase( string obj )
- : _obj( obj ) {}
+ : _obj( obj ) {}
private:
virtual unsigned len() const { return 0; }
virtual const int *nums() const { return 0; }
@@ -521,7 +554,7 @@ namespace QueryOptimizerTests {
virtual BSONObj obj() const { return fromjson( _obj ); }
string _obj;
};
-
+
struct Diff33 : public EmptyBase {
Diff33() : EmptyBase( "{a:{$gte:1,$lte:5},b:{$gt:0,$lt:6}}" ) {}
};
@@ -553,7 +586,7 @@ namespace QueryOptimizerTests {
struct Diff40 : public EmptyBase {
Diff40() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gt:0,$lte:5}}" ) {}
};
-
+
struct Diff41 : public TwoRangeBase {
Diff41() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:0,$lt:5}}", 5, 5, true, true ) {}
};
@@ -606,8 +639,8 @@ namespace QueryOptimizerTests {
Diff53() : EmptyBase( "{a:{$gte:1,$lt:5},b:{$gte:1,$lte:5}}" ) {}
};
- struct Diff54 : public TwoRangeBase {
- Diff54() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:1,$lt:5}}", 1, 5, true, true ) {}
+ struct Diff54 : public SplitRangeBase {
+ Diff54() : SplitRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:1,$lt:5}}", 1, true, 1, true, 5, true, 5, true ) {}
};
struct Diff55 : public TwoRangeBase {
@@ -621,7 +654,7 @@ namespace QueryOptimizerTests {
struct Diff57 : public EmptyBase {
Diff57() : EmptyBase( "{a:{$gte:1,$lte:5},b:{$gte:1,$lte:5}}" ) {}
};
-
+
struct Diff58 : public TwoRangeBase {
Diff58() : TwoRangeBase( "{a:1,b:{$gt:1,$lt:5}}", 1, 1, true, true ) {}
};
@@ -645,7 +678,11 @@ namespace QueryOptimizerTests {
struct Diff63 : public EmptyBase {
Diff63() : EmptyBase( "{a:5,b:5}" ) {}
};
-
+
+ struct Diff64 : public TwoRangeBase {
+ Diff64() : TwoRangeBase( "{a:{$gte:1,$lte:2},b:{$gt:0,$lte:1}}", 1, 2, false, true ) {}
+ };
+
class DiffMulti1 : public DiffBase {
public:
void run() {
@@ -656,12 +693,12 @@ namespace QueryOptimizerTests {
other |= frs.range( "d" );
other |= frs.range( "e" );
ret -= other;
- check( ret );
+ check( ret );
}
protected:
- virtual unsigned len() const { return 1; }
- virtual const int *nums() const { static int n[] = { 2, 7 }; return n; }
- virtual const bool *incs() const { static bool b[] = { true, true }; return b; }
+ virtual unsigned len() const { return 3; }
+ virtual const int *nums() const { static int n[] = { 2, 3, 3, 4, 5, 7 }; return n; }
+ virtual const bool *incs() const { static bool b[] = { true, false, false, true, true, true }; return b; }
virtual BSONObj obj() const { return BSONObj(); }
};
@@ -675,7 +712,7 @@ namespace QueryOptimizerTests {
ret |= frs.range( "d" );
ret |= frs.range( "e" );
ret -= mask;
- check( ret );
+ check( ret );
}
protected:
virtual unsigned len() const { return 2; }
@@ -683,7 +720,7 @@ namespace QueryOptimizerTests {
virtual const bool *incs() const { static bool b[] = { false, true, true, false }; return b; }
virtual BSONObj obj() const { return BSONObj(); }
};
-
+
class SetIntersect {
public:
void run() {
@@ -693,9 +730,9 @@ namespace QueryOptimizerTests {
ASSERT_EQUALS( fromjson( "{a:1,b:5,c:7,d:{$gte:8,$lte:9},e:10}" ), frs1.simplifiedQuery( BSONObj() ) );
}
};
-
+
} // namespace FieldRangeTests
-
+
namespace QueryPlanTests {
class Base {
public:
@@ -742,23 +779,25 @@ namespace QueryOptimizerTests {
static DBDirectClient client_;
};
DBDirectClient Base::client_;
-
+
// There's a limit of 10 indexes total, make sure not to exceed this in a given test.
#define INDEXNO(x) nsd()->idxNo( *this->index( BSON(x) ) )
#define INDEX(x) this->index( BSON(x) )
auto_ptr< FieldRangeSet > FieldRangeSet_GLOBAL;
#define FBS(x) ( FieldRangeSet_GLOBAL.reset( new FieldRangeSet( ns(), x ) ), *FieldRangeSet_GLOBAL )
-
+ auto_ptr< FieldRangeSet > FieldRangeSet_GLOBAL2;
+#define FBS2(x) ( FieldRangeSet_GLOBAL2.reset( new FieldRangeSet( ns(), x ) ), *FieldRangeSet_GLOBAL2 )
+
class NoIndex : public Base {
public:
void run() {
- QueryPlan p( nsd(), -1, FBS( BSONObj() ), BSONObj(), BSONObj() );
+ QueryPlan p( nsd(), -1, FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSONObj() );
ASSERT( !p.optimal() );
ASSERT( !p.scanAndOrderRequired() );
ASSERT( !p.exactKeyMatch() );
}
};
-
+
class SimpleOrder : public Base {
public:
void run() {
@@ -768,43 +807,43 @@ namespace QueryOptimizerTests {
BSONObjBuilder b2;
b2.appendMaxKey( "" );
BSONObj end = b2.obj();
-
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p.scanAndOrderRequired() );
ASSERT( !startKey( p ).woCompare( start ) );
ASSERT( !endKey( p ).woCompare( end ) );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << 1 ) );
ASSERT( !p2.scanAndOrderRequired() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "b" << 1 ) );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "b" << 1 ) );
ASSERT( p3.scanAndOrderRequired() );
ASSERT( !startKey( p3 ).woCompare( start ) );
ASSERT( !endKey( p3 ).woCompare( end ) );
}
};
-
+
class MoreIndexThanNeeded : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
- ASSERT( !p.scanAndOrderRequired() );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ ASSERT( !p.scanAndOrderRequired() );
}
};
-
+
class IndexSigns : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << -1 ) , FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
- ASSERT( !p.scanAndOrderRequired() );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << -1 ) , FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
+ ASSERT( !p.scanAndOrderRequired() );
ASSERT_EQUALS( 1, p.direction() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
- ASSERT( p2.scanAndOrderRequired() );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
+ ASSERT( p2.scanAndOrderRequired() );
ASSERT_EQUALS( 0, p2.direction() );
- QueryPlan p3( nsd(), indexno( id_obj ), FBS( BSONObj() ), BSONObj(), BSON( "_id" << 1 ) );
+ QueryPlan p3( nsd(), indexno( id_obj ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "_id" << 1 ) );
ASSERT( !p3.scanAndOrderRequired() );
ASSERT_EQUALS( 1, p3.direction() );
- }
+ }
};
-
+
class IndexReverse : public Base {
public:
void run() {
@@ -816,18 +855,18 @@ namespace QueryOptimizerTests {
b2.appendMaxKey( "" );
b2.appendMinKey( "" );
BSONObj end = b2.obj();
- QueryPlan p( nsd(), INDEXNO( "a" << -1 << "b" << 1 ),FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
- ASSERT( !p.scanAndOrderRequired() );
+ QueryPlan p( nsd(), INDEXNO( "a" << -1 << "b" << 1 ),FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
+ ASSERT( !p.scanAndOrderRequired() );
ASSERT_EQUALS( -1, p.direction() );
ASSERT( !startKey( p ).woCompare( start ) );
ASSERT( !endKey( p ).woCompare( end ) );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << -1 << "b" << -1 ) );
- ASSERT( !p2.scanAndOrderRequired() );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << -1 << "b" << -1 ) );
+ ASSERT( !p2.scanAndOrderRequired() );
ASSERT_EQUALS( -1, p2.direction() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << -1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << -1 << "b" << -1 ) );
- ASSERT( p3.scanAndOrderRequired() );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << -1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << -1 << "b" << -1 ) );
+ ASSERT( p3.scanAndOrderRequired() );
ASSERT_EQUALS( 0, p3.direction() );
- }
+ }
};
class NoOrder : public Base {
@@ -841,143 +880,143 @@ namespace QueryOptimizerTests {
b2.append( "", 3 );
b2.appendMaxKey( "" );
BSONObj end = b2.obj();
- QueryPlan p( nsd(), INDEXNO( "a" << -1 << "b" << 1 ), FBS( BSON( "a" << 3 ) ), BSON( "a" << 3 ), BSONObj() );
- ASSERT( !p.scanAndOrderRequired() );
+ QueryPlan p( nsd(), INDEXNO( "a" << -1 << "b" << 1 ), FBS( BSON( "a" << 3 ) ), FBS2( BSON( "a" << 3 ) ), BSON( "a" << 3 ), BSONObj() );
+ ASSERT( !p.scanAndOrderRequired() );
ASSERT( !startKey( p ).woCompare( start ) );
ASSERT( !endKey( p ).woCompare( end ) );
- QueryPlan p2( nsd(), INDEXNO( "a" << -1 << "b" << 1 ), FBS( BSON( "a" << 3 ) ), BSON( "a" << 3 ), BSONObj() );
- ASSERT( !p2.scanAndOrderRequired() );
+ QueryPlan p2( nsd(), INDEXNO( "a" << -1 << "b" << 1 ), FBS( BSON( "a" << 3 ) ), FBS2( BSON( "a" << 3 ) ), BSON( "a" << 3 ), BSONObj() );
+ ASSERT( !p2.scanAndOrderRequired() );
ASSERT( !startKey( p ).woCompare( start ) );
ASSERT( !endKey( p ).woCompare( end ) );
- }
+ }
};
-
+
class EqualWithOrder : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 4 ) ), BSON( "a" << 4 ), BSON( "b" << 1 ) );
- ASSERT( !p.scanAndOrderRequired() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "b" << 4 ) ), BSON( "b" << 4 ), BSON( "a" << 1 << "c" << 1 ) );
- ASSERT( !p2.scanAndOrderRequired() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 4 ) ), BSON( "b" << 4 ), BSON( "a" << 1 << "c" << 1 ) );
- ASSERT( p3.scanAndOrderRequired() );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 4 ) ), FBS2( BSON( "a" << 4 ) ), BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ ASSERT( !p.scanAndOrderRequired() );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "b" << 4 ) ), FBS2( BSON( "b" << 4 ) ), BSON( "b" << 4 ), BSON( "a" << 1 << "c" << 1 ) );
+ ASSERT( !p2.scanAndOrderRequired() );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 4 ) ), FBS2( BSON( "b" << 4 ) ), BSON( "b" << 4 ), BSON( "a" << 1 << "c" << 1 ) );
+ ASSERT( p3.scanAndOrderRequired() );
}
};
-
+
class Optimal : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( p.optimal() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( p2.optimal() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 ) ), FBS2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSON( "a" << 1 ) );
ASSERT( p3.optimal() );
- QueryPlan p4( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p4( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), FBS2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSON( "a" << 1 ) );
ASSERT( !p4.optimal() );
- QueryPlan p5( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSON( "b" << 1 ) );
+ QueryPlan p5( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 ) ), FBS2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSON( "b" << 1 ) );
ASSERT( p5.optimal() );
- QueryPlan p6( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSON( "b" << 1 ) );
+ QueryPlan p6( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), FBS2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSON( "b" << 1 ) );
ASSERT( !p6.optimal() );
- QueryPlan p7( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 << "b" << 1 ) ), BSON( "a" << 1 << "b" << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p7( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 << "b" << 1 ) ), FBS2( BSON( "a" << 1 << "b" << 1 ) ), BSON( "a" << 1 << "b" << 1 ), BSON( "a" << 1 ) );
ASSERT( p7.optimal() );
- QueryPlan p8( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p8( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), FBS2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSON( "a" << 1 ) );
ASSERT( p8.optimal() );
- QueryPlan p9( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p9( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), FBS2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSON( "a" << 1 ) );
ASSERT( p9.optimal() );
}
};
-
+
class MoreOptimal : public Base {
public:
void run() {
- QueryPlan p10( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSONObj() );
- ASSERT( p10.optimal() );
- QueryPlan p11( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSONObj() );
- ASSERT( p11.optimal() );
- QueryPlan p12( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << LT << 1 ) ), BSON( "a" << LT << 1 ), BSONObj() );
- ASSERT( p12.optimal() );
- QueryPlan p13( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << LT << 1 ) ), BSON( "a" << LT << 1 ), BSON( "a" << 1 ) );
- ASSERT( p13.optimal() );
+ QueryPlan p10( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 ) ), FBS2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSONObj() );
+ ASSERT( p10.optimal() );
+ QueryPlan p11( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), FBS2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSONObj() );
+ ASSERT( p11.optimal() );
+ QueryPlan p12( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << LT << 1 ) ), FBS2( BSON( "a" << LT << 1 ) ), BSON( "a" << LT << 1 ), BSONObj() );
+ ASSERT( p12.optimal() );
+ QueryPlan p13( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << LT << 1 ) ), FBS2( BSON( "a" << LT << 1 ) ), BSON( "a" << LT << 1 ), BSON( "a" << 1 ) );
+ ASSERT( p13.optimal() );
}
};
-
+
class KeyMatch : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p.exactKeyMatch() );
- QueryPlan p2( nsd(), INDEXNO( "b" << 1 << "a" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "b" << 1 << "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p2.exactKeyMatch() );
- QueryPlan p3( nsd(), INDEXNO( "b" << 1 << "a" << 1 ), FBS( BSON( "b" << "z" ) ), BSON( "b" << "z" ), BSON( "a" << 1 ) );
+ QueryPlan p3( nsd(), INDEXNO( "b" << 1 << "a" << 1 ), FBS( BSON( "b" << "z" ) ), FBS2( BSON( "b" << "z" ) ), BSON( "b" << "z" ), BSON( "a" << 1 ) );
ASSERT( !p3.exactKeyMatch() );
- QueryPlan p4( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << "y" << "b" << "z" ) ), BSON( "c" << "y" << "b" << "z" ), BSON( "a" << 1 ) );
+ QueryPlan p4( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << "y" << "b" << "z" ) ), FBS2( BSON( "c" << "y" << "b" << "z" ) ), BSON( "c" << "y" << "b" << "z" ), BSON( "a" << 1 ) );
ASSERT( !p4.exactKeyMatch() );
- QueryPlan p5( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << "y" << "b" << "z" ) ), BSON( "c" << "y" << "b" << "z" ), BSONObj() );
+ QueryPlan p5( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << "y" << "b" << "z" ) ), FBS2( BSON( "c" << "y" << "b" << "z" ) ), BSON( "c" << "y" << "b" << "z" ), BSONObj() );
ASSERT( !p5.exactKeyMatch() );
- QueryPlan p6( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << LT << "y" << "b" << GT << "z" ) ), BSON( "c" << LT << "y" << "b" << GT << "z" ), BSONObj() );
+ QueryPlan p6( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << LT << "y" << "b" << GT << "z" ) ), FBS2( BSON( "c" << LT << "y" << "b" << GT << "z" ) ), BSON( "c" << LT << "y" << "b" << GT << "z" ), BSONObj() );
ASSERT( !p6.exactKeyMatch() );
- QueryPlan p7( nsd(), INDEXNO( "b" << 1 ), FBS( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p7( nsd(), INDEXNO( "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p7.exactKeyMatch() );
- QueryPlan p8( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << "y" << "a" << "z" ) ), BSON( "b" << "y" << "a" << "z" ), BSONObj() );
+ QueryPlan p8( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << "y" << "a" << "z" ) ), FBS2( BSON( "b" << "y" << "a" << "z" ) ), BSON( "b" << "y" << "a" << "z" ), BSONObj() );
ASSERT( p8.exactKeyMatch() );
- QueryPlan p9( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "z" ) ), BSON( "a" << "z" ), BSON( "a" << 1 ) );
+ QueryPlan p9( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "z" ) ), FBS2( BSON( "a" << "z" ) ), BSON( "a" << "z" ), BSON( "a" << 1 ) );
ASSERT( p9.exactKeyMatch() );
}
};
-
+
class MoreKeyMatch : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "r" << "b" << NE << "q" ) ), BSON( "a" << "r" << "b" << NE << "q" ), BSON( "a" << 1 ) );
- ASSERT( !p.exactKeyMatch() );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "r" << "b" << NE << "q" ) ), FBS2( BSON( "a" << "r" << "b" << NE << "q" ) ), BSON( "a" << "r" << "b" << NE << "q" ), BSON( "a" << 1 ) );
+ ASSERT( !p.exactKeyMatch() );
}
};
-
+
class ExactKeyQueryTypes : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "b" ) ), BSON( "a" << "b" ), BSONObj() );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "b" ) ), FBS2( BSON( "a" << "b" ) ), BSON( "a" << "b" ), BSONObj() );
ASSERT( p.exactKeyMatch() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << 4 ) ), BSON( "a" << 4 ), BSONObj() );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << 4 ) ), FBS2( BSON( "a" << 4 ) ), BSON( "a" << 4 ), BSONObj() );
ASSERT( !p2.exactKeyMatch() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << BSON( "c" << "d" ) ) ), BSON( "a" << BSON( "c" << "d" ) ), BSONObj() );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << BSON( "c" << "d" ) ) ), FBS2( BSON( "a" << BSON( "c" << "d" ) ) ), BSON( "a" << BSON( "c" << "d" ) ), BSONObj() );
ASSERT( !p3.exactKeyMatch() );
BSONObjBuilder b;
b.appendRegex( "a", "^ddd" );
BSONObj q = b.obj();
- QueryPlan p4( nsd(), INDEXNO( "a" << 1 ), FBS( q ), q, BSONObj() );
+ QueryPlan p4( nsd(), INDEXNO( "a" << 1 ), FBS( q ), FBS2( q ), q, BSONObj() );
ASSERT( !p4.exactKeyMatch() );
- QueryPlan p5( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << "z" << "b" << 4 ) ), BSON( "a" << "z" << "b" << 4 ), BSONObj() );
+ QueryPlan p5( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << "z" << "b" << 4 ) ), FBS2( BSON( "a" << "z" << "b" << 4 ) ), BSON( "a" << "z" << "b" << 4 ), BSONObj() );
ASSERT( !p5.exactKeyMatch() );
}
};
-
+
class Unhelpful : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSONObj() );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), FBS2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSONObj() );
ASSERT( !p.range( "a" ).nontrivial() );
ASSERT( p.unhelpful() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 << "c" << 1 ) ), BSON( "b" << 1 << "c" << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 << "c" << 1 ) ), FBS2( BSON( "b" << 1 << "c" << 1 ) ), BSON( "b" << 1 << "c" << 1 ), BSON( "a" << 1 ) );
ASSERT( !p2.scanAndOrderRequired() );
ASSERT( !p2.range( "a" ).nontrivial() );
ASSERT( !p2.unhelpful() );
- QueryPlan p3( nsd(), INDEXNO( "b" << 1 ), FBS( BSON( "b" << 1 << "c" << 1 ) ), BSON( "b" << 1 << "c" << 1 ), BSONObj() );
+ QueryPlan p3( nsd(), INDEXNO( "b" << 1 ), FBS( BSON( "b" << 1 << "c" << 1 ) ), FBS2( BSON( "b" << 1 << "c" << 1 ) ), BSON( "b" << 1 << "c" << 1 ), BSONObj() );
ASSERT( p3.range( "b" ).nontrivial() );
ASSERT( !p3.unhelpful() );
- QueryPlan p4( nsd(), INDEXNO( "b" << 1 << "c" << 1 ), FBS( BSON( "c" << 1 << "d" << 1 ) ), BSON( "c" << 1 << "d" << 1 ), BSONObj() );
+ QueryPlan p4( nsd(), INDEXNO( "b" << 1 << "c" << 1 ), FBS( BSON( "c" << 1 << "d" << 1 ) ), FBS2( BSON( "c" << 1 << "d" << 1 ) ), BSON( "c" << 1 << "d" << 1 ), BSONObj() );
ASSERT( !p4.range( "b" ).nontrivial() );
ASSERT( p4.unhelpful() );
}
};
-
+
} // namespace QueryPlanTests
namespace QueryPlanSetTests {
class Base {
public:
- Base() : _context( ns() ){
+ Base() : _context( ns() ) {
string err;
userCreateNS( ns(), BSONObj(), err, false );
}
@@ -1000,7 +1039,7 @@ namespace QueryOptimizerTests {
if ( fieldsToReturn )
fieldsToReturn->appendSelfToBufBuilder(b);
toSend.setData(dbQuery, b.buf(), b.len());
- }
+ }
protected:
static const char *ns() { return "unittests.QueryPlanSetTests"; }
static NamespaceDetails *nsd() { return nsdetails( ns() ); }
@@ -1008,24 +1047,26 @@ namespace QueryOptimizerTests {
dblock lk_;
Client::Context _context;
};
-
+
class NoIndexes : public Base {
public:
void run() {
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
-
+
class Optimal : public Base {
public:
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "b_2" );
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSONObj() );
- ASSERT_EQUALS( 1, s.nPlans() );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSONObj() );
+ ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1035,7 +1076,8 @@ namespace QueryOptimizerTests {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( 3, s.nPlans() );
}
};
@@ -1046,11 +1088,12 @@ namespace QueryOptimizerTests {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSONObj() ) );
- QueryPlanSet s( ns(), frs, BSONObj(), BSONObj() );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSONObj(), BSONObj() );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
-
+
class HintSpec : public Base {
public:
void run() {
@@ -1059,8 +1102,9 @@ namespace QueryOptimizerTests {
BSONObj b = BSON( "hint" << BSON( "a" << 1 ) );
BSONElement e = b.firstElement();
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
- ASSERT_EQUALS( 1, s.nPlans() );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
+ ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1072,11 +1116,12 @@ namespace QueryOptimizerTests {
BSONObj b = BSON( "hint" << "a_1" );
BSONElement e = b.firstElement();
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
- ASSERT_EQUALS( 1, s.nPlans() );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
+ ASSERT_EQUALS( 1, s.nPlans() );
}
};
-
+
class NaturalHint : public Base {
public:
void run() {
@@ -1085,8 +1130,9 @@ namespace QueryOptimizerTests {
BSONObj b = BSON( "hint" << BSON( "$natural" << 1 ) );
BSONElement e = b.firstElement();
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
- ASSERT_EQUALS( 1, s.nPlans() );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
+ ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1096,7 +1142,8 @@ namespace QueryOptimizerTests {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "b_2" );
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 1 ), BSON( "$natural" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "$natural" << 1 ) );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1107,11 +1154,12 @@ namespace QueryOptimizerTests {
BSONObj b = BSON( "hint" << "a_1" );
BSONElement e = b.firstElement();
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- ASSERT_EXCEPTION( QueryPlanSet s( ns(), frs, BSON( "a" << 1 ), BSON( "b" << 1 ), &e ),
- AssertionException );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ ASSERT_EXCEPTION( QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), &e ),
+ AssertionException );
}
};
-
+
class Count : public Base {
public:
void run() {
@@ -1136,7 +1184,7 @@ namespace QueryOptimizerTests {
ASSERT_EQUALS( 0, runCount( ns(), BSON( "query" << BSON( "a" << GT << 0 << LT << -1 ) ), err ) );
}
};
-
+
class QueryMissingNs : public Base {
public:
QueryMissingNs() { log() << "querymissingns starts" << endl; }
@@ -1154,25 +1202,27 @@ namespace QueryOptimizerTests {
}
};
-
+
class UnhelpfulIndex : public Base {
public:
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 << "c" << 2 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 1 << "c" << 2 ), BSONObj() );
- ASSERT_EQUALS( 2, s.nPlans() );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 << "c" << 2 ), BSONObj() );
+ ASSERT_EQUALS( 2, s.nPlans() );
}
- };
-
+ };
+
class SingleException : public Base {
public:
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( 3, s.nPlans() );
bool threw = false;
auto_ptr< TestOp > t( new TestOp( true, threw ) );
@@ -1200,6 +1250,7 @@ namespace QueryOptimizerTests {
return op;
}
virtual bool mayRecordPlan() const { return true; }
+ virtual long long nscanned() { return 0; }
private:
bool iThrow_;
bool &threw_;
@@ -1207,14 +1258,15 @@ namespace QueryOptimizerTests {
mutable bool youThrow_;
};
};
-
+
class AllException : public Base {
public:
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( 3, s.nPlans() );
auto_ptr< TestOp > t( new TestOp() );
boost::shared_ptr< TestOp > done = s.runOp( *t );
@@ -1233,9 +1285,10 @@ namespace QueryOptimizerTests {
return new TestOp();
}
virtual bool mayRecordPlan() const { return true; }
+ virtual long long nscanned() { return 0; }
};
};
-
+
class SaveGoodIndex : public Base {
public:
void run() {
@@ -1249,7 +1302,7 @@ namespace QueryOptimizerTests {
nPlans( 3 );
runQuery();
nPlans( 1 );
-
+
{
DBDirectClient client;
for( int i = 0; i < 34; ++i ) {
@@ -1259,9 +1312,10 @@ namespace QueryOptimizerTests {
}
}
nPlans( 3 );
-
+
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
NoRecordTestOp original;
s.runOp( original );
nPlans( 3 );
@@ -1269,29 +1323,33 @@ namespace QueryOptimizerTests {
BSONObj hint = fromjson( "{hint:{$natural:1}}" );
BSONElement hintElt = hint.firstElement();
auto_ptr< FieldRangeSet > frs2( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s2( ns(), frs2, BSON( "a" << 4 ), BSON( "b" << 1 ), &hintElt );
+ auto_ptr< FieldRangeSet > frsOrig2( new FieldRangeSet( *frs2 ) );
+ QueryPlanSet s2( ns(), frs2, frsOrig2, BSON( "a" << 4 ), BSON( "b" << 1 ), &hintElt );
TestOp newOriginal;
s2.runOp( newOriginal );
nPlans( 3 );
auto_ptr< FieldRangeSet > frs3( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s3( ns(), frs3, BSON( "a" << 4 ), BSON( "b" << 1 << "c" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig3( new FieldRangeSet( *frs3 ) );
+ QueryPlanSet s3( ns(), frs3, frsOrig3, BSON( "a" << 4 ), BSON( "b" << 1 << "c" << 1 ) );
TestOp newerOriginal;
s3.runOp( newerOriginal );
- nPlans( 3 );
-
+ nPlans( 3 );
+
runQuery();
nPlans( 1 );
}
private:
void nPlans( int n ) {
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSON( "b" << 1 ) );
- ASSERT_EQUALS( n, s.nPlans() );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ ASSERT_EQUALS( n, s.nPlans() );
}
void runQuery() {
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
TestOp original;
s.runOp( original );
}
@@ -1305,33 +1363,37 @@ namespace QueryOptimizerTests {
return new TestOp();
}
virtual bool mayRecordPlan() const { return true; }
+ virtual long long nscanned() { return 0; }
};
class NoRecordTestOp : public TestOp {
virtual bool mayRecordPlan() const { return false; }
virtual QueryOp *_createChild() const { return new NoRecordTestOp(); }
};
- };
-
+ };
+
class TryAllPlansOnErr : public Base {
public:
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s( ns(), frs, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ScanOnlyTestOp op;
s.runOp( op );
ASSERT( fromjson( "{$natural:1}" ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( s.fbs().pattern( BSON( "b" << 1 ) ) ) ) == 0 );
ASSERT_EQUALS( 1, NamespaceDetailsTransient::_get( ns() ).nScannedForPattern( s.fbs().pattern( BSON( "b" << 1 ) ) ) );
-
+
auto_ptr< FieldRangeSet > frs2( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- QueryPlanSet s2( ns(), frs2, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSet > frsOrig2( new FieldRangeSet( *frs2 ) );
+ QueryPlanSet s2( ns(), frs2, frsOrig2, BSON( "a" << 4 ), BSON( "b" << 1 ) );
TestOp op2;
ASSERT( s2.runOp( op2 )->complete() );
}
private:
class TestOp : public QueryOp {
public:
+ TestOp() {}
virtual void _init() {}
virtual void next() {
if ( qp().indexKey().firstElement().fieldName() == string( "$natural" ) )
@@ -1342,6 +1404,7 @@ namespace QueryOptimizerTests {
return new TestOp();
}
virtual bool mayRecordPlan() const { return true; }
+ virtual long long nscanned() { return 1; }
};
class ScanOnlyTestOp : public TestOp {
virtual void next() {
@@ -1354,7 +1417,7 @@ namespace QueryOptimizerTests {
}
};
};
-
+
class FindOne : public Base {
public:
void run() {
@@ -1362,12 +1425,12 @@ namespace QueryOptimizerTests {
theDataFileMgr.insertWithObjMod( ns(), one );
BSONObj result;
ASSERT( Helpers::findOne( ns(), BSON( "a" << 1 ), result ) );
- ASSERT_EXCEPTION( Helpers::findOne( ns(), BSON( "a" << 1 ), result, true ), AssertionException );
+ ASSERT_EXCEPTION( Helpers::findOne( ns(), BSON( "a" << 1 ), result, true ), AssertionException );
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
- ASSERT( Helpers::findOne( ns(), BSON( "a" << 1 ), result, true ) );
+ ASSERT( Helpers::findOne( ns(), BSON( "a" << 1 ), result, true ) );
}
};
-
+
class Delete : public Base {
public:
void run() {
@@ -1380,10 +1443,10 @@ namespace QueryOptimizerTests {
theDataFileMgr.insertWithObjMod( ns(), one );
deleteObjects( ns(), BSON( "a" << 1 ), false );
ASSERT( BSON( "a" << 1 ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "a" << 1 ) ).pattern() ) ) == 0 );
- ASSERT_EQUALS( 2, NamespaceDetailsTransient::_get( ns() ).nScannedForPattern( FieldRangeSet( ns(), BSON( "a" << 1 ) ).pattern() ) );
+ ASSERT_EQUALS( 1, NamespaceDetailsTransient::_get( ns() ).nScannedForPattern( FieldRangeSet( ns(), BSON( "a" << 1 ) ).pattern() ) );
}
};
-
+
class DeleteOneScan : public Base {
public:
void run() {
@@ -1410,7 +1473,7 @@ namespace QueryOptimizerTests {
theDataFileMgr.insertWithObjMod( ns(), one );
theDataFileMgr.insertWithObjMod( ns(), two );
theDataFileMgr.insertWithObjMod( ns(), three );
- deleteObjects( ns(), BSON( "a" << GTE << 0 << "_id" << GT << 0 ), true );
+ deleteObjects( ns(), BSON( "a" << GTE << 0 ), true );
for( boost::shared_ptr<Cursor> c = theDataFileMgr.findAll( ns() ); c->ok(); c->advance() )
ASSERT( 2 != c->current().getIntField( "_id" ) );
}
@@ -1436,7 +1499,7 @@ namespace QueryOptimizerTests {
runQuery( m, q);
}
ASSERT( BSON( "$natural" << 1 ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ) ).pattern() ) ) == 0 );
-
+
Message m2;
assembleRequest( ns(), QUERY( "b" << 99 << "a" << GTE << 0 ).obj, 2, 0, 0, 0, m2 );
{
@@ -1444,11 +1507,11 @@ namespace QueryOptimizerTests {
QueryMessage q(d);
runQuery( m2, q);
}
- ASSERT( BSON( "a" << 1 ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ) ).pattern() ) ) == 0 );
- ASSERT_EQUALS( 2, NamespaceDetailsTransient::_get( ns() ).nScannedForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ) ).pattern() ) );
+ ASSERT( BSON( "a" << 1 ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ) ).pattern() ) ) == 0 );
+ ASSERT_EQUALS( 3, NamespaceDetailsTransient::_get( ns() ).nScannedForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ) ).pattern() ) );
}
};
-
+
class InQueryIntervals : public Base {
public:
void run() {
@@ -1460,30 +1523,32 @@ namespace QueryOptimizerTests {
BSONObj hint = fromjson( "{$hint:{a:1}}" );
BSONElement hintElt = hint.firstElement();
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ) ) );
- QueryPlanSet s( ns(), frs, fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSONObj(), &hintElt );
- QueryPlan qp( nsd(), 1, s.fbs(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSONObj() );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSONObj(), &hintElt );
+ QueryPlan qp( nsd(), 1, s.fbs(), s.originalFrs(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSONObj() );
boost::shared_ptr<Cursor> c = qp.newCursor();
double expected[] = { 2, 3, 6, 9 };
for( int i = 0; i < 4; ++i, c->advance() ) {
ASSERT_EQUALS( expected[ i ], c->current().getField( "a" ).number() );
}
ASSERT( !c->ok() );
-
+
// now check reverse
{
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ) ) );
- QueryPlanSet s( ns(), frs, fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSON( "a" << -1 ), &hintElt );
- QueryPlan qp( nsd(), 1, s.fbs(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSON( "a" << -1 ) );
+ auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
+ QueryPlanSet s( ns(), frs, frsOrig, fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSON( "a" << -1 ), &hintElt );
+ QueryPlan qp( nsd(), 1, s.fbs(), s.originalFrs(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSON( "a" << -1 ) );
boost::shared_ptr<Cursor> c = qp.newCursor();
double expected[] = { 9, 6, 3, 2 };
for( int i = 0; i < 4; ++i, c->advance() ) {
ASSERT_EQUALS( expected[ i ], c->current().getField( "a" ).number() );
}
- ASSERT( !c->ok() );
+ ASSERT( !c->ok() );
}
}
};
-
+
class EqualityThenIn : public Base {
public:
void run() {
@@ -1494,8 +1559,8 @@ namespace QueryOptimizerTests {
}
BSONObj hint = fromjson( "{$hint:{a:1,b:1}}" );
BSONElement hintElt = hint.firstElement();
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:5,b:{$in:[2,3,6,9,11]}}" ) ) );
- QueryPlan qp( nsd(), 1, *frs, fromjson( "{a:5,b:{$in:[2,3,6,9,11]}}" ), BSONObj() );
+ auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:5,b:{$in:[2,3,6,9,11]}}" ) ) );
+ QueryPlan qp( nsd(), 1, *frs, *frs, fromjson( "{a:5,b:{$in:[2,3,6,9,11]}}" ), BSONObj() );
boost::shared_ptr<Cursor> c = qp.newCursor();
double expected[] = { 2, 3, 6, 9 };
ASSERT( c->ok() );
@@ -1506,7 +1571,7 @@ namespace QueryOptimizerTests {
ASSERT( !c->ok() );
}
};
-
+
class NotEqualityThenIn : public Base {
public:
void run() {
@@ -1518,7 +1583,7 @@ namespace QueryOptimizerTests {
BSONObj hint = fromjson( "{$hint:{a:1,b:1}}" );
BSONElement hintElt = hint.firstElement();
auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:{$gte:5},b:{$in:[2,3,6,9,11]}}" ) ) );
- QueryPlan qp( nsd(), 1, *frs, fromjson( "{a:{$gte:5},b:{$in:[2,3,6,9,11]}}" ), BSONObj() );
+ QueryPlan qp( nsd(), 1, *frs, *frs, fromjson( "{a:{$gte:5},b:{$in:[2,3,6,9,11]}}" ), BSONObj() );
boost::shared_ptr<Cursor> c = qp.newCursor();
int matches[] = { 2, 3, 6, 9 };
for( int i = 0; i < 4; ++i, c->advance() ) {
@@ -1529,7 +1594,7 @@ namespace QueryOptimizerTests {
};
} // namespace QueryPlanSetTests
-
+
class Base {
public:
Base() : _ctx( ns() ) {
@@ -1549,7 +1614,7 @@ namespace QueryOptimizerTests {
dblock lk_;
Client::Context _ctx;
};
-
+
class BestGuess : public Base {
public:
void run() {
@@ -1559,7 +1624,7 @@ namespace QueryOptimizerTests {
theDataFileMgr.insertWithObjMod( ns(), temp );
temp = BSON( "b" << 1 );
theDataFileMgr.insertWithObjMod( ns(), temp );
-
+
boost::shared_ptr< Cursor > c = bestGuessCursor( ns(), BSON( "b" << 1 ), BSON( "a" << 1 ) );
ASSERT_EQUALS( string( "a" ), c->indexKeyPattern().firstElement().fieldName() );
c = bestGuessCursor( ns(), BSON( "a" << 1 ), BSON( "b" << 1 ) );
@@ -1568,22 +1633,22 @@ namespace QueryOptimizerTests {
ASSERT_EQUALS( string( "a" ), m->sub_c()->indexKeyPattern().firstElement().fieldName() );
m = dynamic_pointer_cast< MultiCursor >( bestGuessCursor( ns(), fromjson( "{a:1,$or:[{y:1}]}" ), BSON( "b" << 1 ) ) );
ASSERT_EQUALS( string( "b" ), m->sub_c()->indexKeyPattern().firstElement().fieldName() );
-
+
FieldRangeSet frs( "ns", BSON( "a" << 1 ) );
{
scoped_lock lk(NamespaceDetailsTransient::_qcMutex);
- NamespaceDetailsTransient::get_inlock( ns() ).registerIndexForPattern( frs.pattern( BSON( "b" << 1 ) ), BSON( "a" << 1 ), 0 );
+ NamespaceDetailsTransient::get_inlock( ns() ).registerIndexForPattern( frs.pattern( BSON( "b" << 1 ) ), BSON( "a" << 1 ), 0 );
}
m = dynamic_pointer_cast< MultiCursor >( bestGuessCursor( ns(), fromjson( "{a:1,$or:[{y:1}]}" ), BSON( "b" << 1 ) ) );
ASSERT_EQUALS( string( "b" ), m->sub_c()->indexKeyPattern().firstElement().fieldName() );
}
};
-
+
class All : public Suite {
public:
- All() : Suite( "queryoptimizer" ){}
-
- void setupTests(){
+ All() : Suite( "queryoptimizer" ) {}
+
+ void setupTests() {
add< FieldRangeTests::Empty >();
add< FieldRangeTests::Eq >();
add< FieldRangeTests::DupEq >();
@@ -1606,6 +1671,7 @@ namespace QueryOptimizerTests {
add< FieldRangeTests::Numeric >();
add< FieldRangeTests::InLowerBound >();
add< FieldRangeTests::InUpperBound >();
+ add< FieldRangeTests::UnionBound >();
add< FieldRangeTests::MultiBound >();
add< FieldRangeTests::Diff1 >();
add< FieldRangeTests::Diff2 >();
@@ -1670,6 +1736,7 @@ namespace QueryOptimizerTests {
add< FieldRangeTests::Diff61 >();
add< FieldRangeTests::Diff62 >();
add< FieldRangeTests::Diff63 >();
+ add< FieldRangeTests::Diff64 >();
add< FieldRangeTests::DiffMulti1 >();
add< FieldRangeTests::DiffMulti2 >();
add< FieldRangeTests::SetIntersect >();
@@ -1713,6 +1780,6 @@ namespace QueryOptimizerTests {
add< BestGuess >();
}
} myall;
-
+
} // namespace QueryOptimizerTests
diff --git a/dbtests/querytests.cpp b/dbtests/querytests.cpp
index 31e1879..d008e4d 100644
--- a/dbtests/querytests.cpp
+++ b/dbtests/querytests.cpp
@@ -25,6 +25,8 @@
#include "../db/json.h"
#include "../db/lasterror.h"
+#include "../util/timer.h"
+
#include "dbtests.h"
namespace mongo {
@@ -37,7 +39,7 @@ namespace QueryTests {
dblock lk;
Client::Context _context;
public:
- Base() : _context( ns() ){
+ Base() : _context( ns() ) {
addIndex( fromjson( "{\"a\":1}" ) );
}
~Base() {
@@ -48,7 +50,8 @@ namespace QueryTests {
toDelete.push_back( c->currLoc() );
for( vector< DiskLoc >::iterator i = toDelete.begin(); i != toDelete.end(); ++i )
theDataFileMgr.deleteRecord( ns(), i->rec(), *i, false );
- } catch ( ... ) {
+ }
+ catch ( ... ) {
FAIL( "Exception while cleaning up records" );
}
}
@@ -129,7 +132,7 @@ namespace QueryTests {
ASSERT_EQUALS( 1, runCount( ns(), cmd, err ) );
}
};
-
+
class FindOne : public Base {
public:
void run() {
@@ -145,12 +148,11 @@ namespace QueryTests {
class ClientBase {
public:
- // NOTE: Not bothering to backup the old error record.
ClientBase() {
mongo::lastError.reset( new LastError() );
}
~ClientBase() {
- mongo::lastError.release();
+ //mongo::lastError.release();
}
protected:
static void insert( const char *ns, BSONObj o ) {
@@ -170,6 +172,9 @@ namespace QueryTests {
class BoundedKey : public ClientBase {
public:
+ ~BoundedKey() {
+ client().dropCollection( "unittests.querytests.BoundedKey" );
+ }
void run() {
const char *ns = "unittests.querytests.BoundedKey";
insert( ns, BSON( "a" << 1 ) );
@@ -210,7 +215,7 @@ namespace QueryTests {
client().dropCollection( ns );
}
- void testLimit(int limit){
+ void testLimit(int limit) {
ASSERT_EQUALS(client().query( ns, BSONObj(), limit )->itcount(), limit);
}
void run() {
@@ -285,7 +290,7 @@ namespace QueryTests {
insert( ns, BSON( "a" << 0 ) );
c = client().query( ns, QUERY( "a" << 1 ).hint( BSON( "$natural" << 1 ) ), 2, 0, 0, QueryOption_CursorTailable );
ASSERT( 0 != c->getCursorId() );
- ASSERT( !c->isDead() );
+ ASSERT( !c->isDead() );
}
};
@@ -345,7 +350,7 @@ namespace QueryTests {
ASSERT( !client().getLastError().empty() );
}
};
-
+
class TailableQueryOnId : public ClientBase {
public:
~TailableQueryOnId() {
@@ -511,7 +516,13 @@ namespace QueryTests {
static const char *ns() { return "unittests.querytests.AutoResetIndexCache"; }
static const char *idxNs() { return "unittests.system.indexes"; }
void index() const { ASSERT( !client().findOne( idxNs(), BSON( "name" << NE << "_id_" ) ).isEmpty() ); }
- void noIndex() const { ASSERT( client().findOne( idxNs(), BSON( "name" << NE << "_id_" ) ).isEmpty() ); }
+ void noIndex() const {
+ BSONObj o = client().findOne( idxNs(), BSON( "name" << NE << "_id_" ) );
+ if( !o.isEmpty() ) {
+ cout << o.toString() << endl;
+ ASSERT( false );
+ }
+ }
void checkIndex() {
client().ensureIndex( ns(), BSON( "a" << 1 ) );
index();
@@ -598,8 +609,8 @@ namespace QueryTests {
client().insert( ns, fromjson( "{a:[1,2,3]}" ) );
ASSERT( client().query( ns, Query( "{a:[1,2,3]}" ) )->more() );
client().ensureIndex( ns, BSON( "a" << 1 ) );
- ASSERT( client().query( ns, Query( "{a:{$in:[1,[1,2,3]]}}" ).hint( BSON( "a" << 1 ) ) )->more() );
- ASSERT( client().query( ns, Query( "{a:[1,2,3]}" ).hint( BSON( "a" << 1 ) ) )->more() ); // SERVER-146
+ ASSERT( client().query( ns, Query( "{a:{$in:[1,[1,2,3]]}}" ).hint( BSON( "a" << 1 ) ) )->more() );
+ ASSERT( client().query( ns, Query( "{a:[1,2,3]}" ).hint( BSON( "a" << 1 ) ) )->more() ); // SERVER-146
}
};
@@ -613,7 +624,7 @@ namespace QueryTests {
client().insert( ns, fromjson( "{a:[[1],2]}" ) );
check( "$natural" );
client().ensureIndex( ns, BSON( "a" << 1 ) );
- check( "a" ); // SERVER-146
+ check( "a" ); // SERVER-146
}
private:
void check( const string &hintField ) {
@@ -756,12 +767,12 @@ namespace QueryTests {
class DifferentNumbers : public ClientBase {
public:
- ~DifferentNumbers(){
+ ~DifferentNumbers() {
client().dropCollection( "unittests.querytests.DifferentNumbers" );
}
- void t( const char * ns ){
+ void t( const char * ns ) {
auto_ptr< DBClientCursor > cursor = client().query( ns, Query().sort( "7" ) );
- while ( cursor->more() ){
+ while ( cursor->more() ) {
BSONObj o = cursor->next();
assert( o.valid() );
//cout << " foo " << o << endl;
@@ -782,37 +793,37 @@ namespace QueryTests {
t(ns);
}
};
-
+
class CollectionBase : public ClientBase {
public:
-
- CollectionBase( string leaf ){
+
+ CollectionBase( string leaf ) {
_ns = "unittests.querytests.";
_ns += leaf;
client().dropCollection( ns() );
}
-
- virtual ~CollectionBase(){
+
+ virtual ~CollectionBase() {
client().dropCollection( ns() );
}
-
- int count(){
+
+ int count() {
return (int) client().count( ns() );
}
- const char * ns(){
+ const char * ns() {
return _ns.c_str();
}
-
+
private:
string _ns;
};
class SymbolStringSame : public CollectionBase {
public:
- SymbolStringSame() : CollectionBase( "symbolstringsame" ){}
+ SymbolStringSame() : CollectionBase( "symbolstringsame" ) {}
- void run(){
+ void run() {
{ BSONObjBuilder b; b.appendSymbol( "x" , "eliot" ); b.append( "z" , 17 ); client().insert( ns() , b.obj() ); }
ASSERT_EQUALS( 17 , client().findOne( ns() , BSONObj() )["z"].number() );
{
@@ -828,46 +839,46 @@ namespace QueryTests {
class TailableCappedRaceCondition : public CollectionBase {
public:
-
- TailableCappedRaceCondition() : CollectionBase( "tailablecappedrace" ){
+
+ TailableCappedRaceCondition() : CollectionBase( "tailablecappedrace" ) {
client().dropCollection( ns() );
_n = 0;
}
- void run(){
+ void run() {
string err;
- writelock lk("");
+ writelock lk("");
Client::Context ctx( "unittests" );
ASSERT( userCreateNS( ns() , fromjson( "{ capped : true , size : 2000 }" ) , err , false ) );
- for ( int i=0; i<100; i++ ){
+ for ( int i=0; i<100; i++ ) {
insertNext();
ASSERT( count() < 45 );
}
-
+
int a = count();
-
+
auto_ptr< DBClientCursor > c = client().query( ns() , QUERY( "i" << GT << 0 ).hint( BSON( "$natural" << 1 ) ), 0, 0, 0, QueryOption_CursorTailable );
int n=0;
- while ( c->more() ){
+ while ( c->more() ) {
BSONObj z = c->next();
n++;
}
-
+
ASSERT_EQUALS( a , n );
insertNext();
ASSERT( c->more() );
- for ( int i=0; i<50; i++ ){
+ for ( int i=0; i<50; i++ ) {
insertNext();
}
- while ( c->more() ){ c->next(); }
+ while ( c->more() ) { c->next(); }
ASSERT( c->isDead() );
}
-
- void insertNext(){
+
+ void insertNext() {
insert( ns() , BSON( "i" << _n++ ) );
}
@@ -876,89 +887,71 @@ namespace QueryTests {
class HelperTest : public CollectionBase {
public:
-
- HelperTest() : CollectionBase( "helpertest" ){
+
+ HelperTest() : CollectionBase( "helpertest" ) {
}
- void run(){
+ void run() {
writelock lk("");
Client::Context ctx( "unittests" );
-
- for ( int i=0; i<50; i++ ){
+
+ for ( int i=0; i<50; i++ ) {
insert( ns() , BSON( "_id" << i << "x" << i * 2 ) );
}
ASSERT_EQUALS( 50 , count() );
-
+
BSONObj res;
ASSERT( Helpers::findOne( ns() , BSON( "_id" << 20 ) , res , true ) );
ASSERT_EQUALS( 40 , res["x"].numberInt() );
-
+
ASSERT( Helpers::findById( cc(), ns() , BSON( "_id" << 20 ) , res ) );
ASSERT_EQUALS( 40 , res["x"].numberInt() );
ASSERT( ! Helpers::findById( cc(), ns() , BSON( "_id" << 200 ) , res ) );
unsigned long long slow , fast;
-
+
int n = 10000;
{
Timer t;
- for ( int i=0; i<n; i++ ){
+ for ( int i=0; i<n; i++ ) {
ASSERT( Helpers::findOne( ns() , BSON( "_id" << 20 ) , res , true ) );
}
slow = t.micros();
}
{
Timer t;
- for ( int i=0; i<n; i++ ){
+ for ( int i=0; i<n; i++ ) {
ASSERT( Helpers::findById( cc(), ns() , BSON( "_id" << 20 ) , res ) );
}
fast = t.micros();
}
-
+
cout << "HelperTest slow:" << slow << " fast:" << fast << endl;
-
- {
- auto_ptr<CursorIterator> i = Helpers::find( ns() );
- int n = 0;
- while ( i->hasNext() ){
- BSONObj o = i->next();
- n++;
- }
- ASSERT_EQUALS( 50 , n );
- i = Helpers::find( ns() , BSON( "_id" << 20 ) );
- n = 0;
- while ( i->hasNext() ){
- BSONObj o = i->next();
- n++;
- }
- ASSERT_EQUALS( 1 , n );
- }
-
}
};
class HelperByIdTest : public CollectionBase {
public:
-
- HelperByIdTest() : CollectionBase( "helpertestbyid" ){
+
+ HelperByIdTest() : CollectionBase( "helpertestbyid" ) {
}
- void run(){
+ void run() {
writelock lk("");
Client::Context ctx( "unittests" );
- for ( int i=0; i<1000; i++ ){
+ for ( int i=0; i<1000; i++ ) {
insert( ns() , BSON( "_id" << i << "x" << i * 2 ) );
}
- for ( int i=0; i<1000; i+=2 ){
+ for ( int i=0; i<1000; i+=2 ) {
client_.remove( ns() , BSON( "_id" << i ) );
}
- BSONObj res;
- for ( int i=0; i<1000; i++ ){
+ BSONObj res;
+ for ( int i=0; i<1000; i++ ) {
bool found = Helpers::findById( cc(), ns() , BSON( "_id" << i ) , res );
ASSERT_EQUALS( i % 2 , int(found) );
}
@@ -966,19 +959,19 @@ namespace QueryTests {
}
};
- class ClientCursorTest : public CollectionBase{
- ClientCursorTest() : CollectionBase( "clientcursortest" ){
+ class ClientCursorTest : public CollectionBase {
+ ClientCursorTest() : CollectionBase( "clientcursortest" ) {
}
- void run(){
+ void run() {
writelock lk("");
Client::Context ctx( "unittests" );
-
- for ( int i=0; i<1000; i++ ){
+
+ for ( int i=0; i<1000; i++ ) {
insert( ns() , BSON( "_id" << i << "x" << i * 2 ) );
}
-
+
}
};
@@ -990,19 +983,19 @@ namespace QueryTests {
~FindingStart() {
__findingStartInitialTimeout = _old;
}
-
+
void run() {
BSONObj info;
ASSERT( client().runCommand( "unittests", BSON( "create" << "querytests.findingstart" << "capped" << true << "size" << 1000 << "$nExtents" << 5 << "autoIndexId" << false ), info ) );
-
+
int i = 0;
for( int oldCount = -1;
- count() != oldCount;
- oldCount = count(), client().insert( ns(), BSON( "ts" << i++ ) ) );
+ count() != oldCount;
+ oldCount = count(), client().insert( ns(), BSON( "ts" << i++ ) ) );
for( int k = 0; k < 5; ++k ) {
client().insert( ns(), BSON( "ts" << i++ ) );
- int min = client().query( ns(), Query().sort( BSON( "$natural" << 1 ) ) )->next()[ "ts" ].numberInt();
+ int min = client().query( ns(), Query().sort( BSON( "$natural" << 1 ) ) )->next()[ "ts" ].numberInt();
for( int j = -1; j < i; ++j ) {
auto_ptr< DBClientCursor > c = client().query( ns(), QUERY( "ts" << GTE << j ), 0, 0, 0, QueryOption_OplogReplay );
ASSERT( c->more() );
@@ -1012,7 +1005,7 @@ namespace QueryTests {
}
}
}
-
+
private:
int _old;
};
@@ -1025,17 +1018,19 @@ namespace QueryTests {
~FindingStartPartiallyFull() {
__findingStartInitialTimeout = _old;
}
-
+
void run() {
+ unsigned startNumCursors = ClientCursor::numCursors();
+
BSONObj info;
ASSERT( client().runCommand( "unittests", BSON( "create" << "querytests.findingstart" << "capped" << true << "size" << 10000 << "$nExtents" << 5 << "autoIndexId" << false ), info ) );
-
+
int i = 0;
for( ; i < 150; client().insert( ns(), BSON( "ts" << i++ ) ) );
-
+
for( int k = 0; k < 5; ++k ) {
client().insert( ns(), BSON( "ts" << i++ ) );
- int min = client().query( ns(), Query().sort( BSON( "$natural" << 1 ) ) )->next()[ "ts" ].numberInt();
+ int min = client().query( ns(), Query().sort( BSON( "$natural" << 1 ) ) )->next()[ "ts" ].numberInt();
for( int j = -1; j < i; ++j ) {
auto_ptr< DBClientCursor > c = client().query( ns(), QUERY( "ts" << GTE << j ), 0, 0, 0, QueryOption_OplogReplay );
ASSERT( c->more() );
@@ -1044,13 +1039,15 @@ namespace QueryTests {
ASSERT_EQUALS( ( j > min ? j : min ), next[ "ts" ].numberInt() );
}
}
+
+ ASSERT_EQUALS( startNumCursors, ClientCursor::numCursors() );
}
-
+
private:
int _old;
};
-
-
+
+
class WhatsMyUri : public CollectionBase {
public:
WhatsMyUri() : CollectionBase( "whatsmyuri" ) {}
@@ -1060,15 +1057,15 @@ namespace QueryTests {
ASSERT_EQUALS( unknownAddress.toString(), result[ "you" ].str() );
}
};
-
+
namespace parsedtests {
class basic1 {
public:
- void _test( const BSONObj& in ){
+ void _test( const BSONObj& in ) {
ParsedQuery q( "a.b" , 5 , 6 , 9 , in , BSONObj() );
ASSERT_EQUALS( BSON( "x" << 5 ) , q.getFilter() );
}
- void run(){
+ void run() {
_test( BSON( "x" << 5 ) );
_test( BSON( "query" << BSON( "x" << 5 ) ) );
_test( BSON( "$query" << BSON( "x" << 5 ) ) );
@@ -1090,23 +1087,23 @@ namespace QueryTests {
namespace queryobjecttests {
class names1 {
public:
- void run(){
+ void run() {
ASSERT_EQUALS( BSON( "x" << 1 ) , QUERY( "query" << BSON( "x" << 1 ) ).getFilter() );
ASSERT_EQUALS( BSON( "x" << 1 ) , QUERY( "$query" << BSON( "x" << 1 ) ).getFilter() );
}
-
+
};
}
class OrderingTest {
public:
- void run(){
+ void run() {
{
Ordering o = Ordering::make( BSON( "a" << 1 << "b" << -1 << "c" << 1 ) );
ASSERT_EQUALS( 1 , o.get(0) );
ASSERT_EQUALS( -1 , o.get(1) );
ASSERT_EQUALS( 1 , o.get(2) );
-
+
ASSERT( ! o.descending( 1 ) );
ASSERT( o.descending( 1 << 1 ) );
ASSERT( ! o.descending( 1 << 2 ) );
@@ -1117,7 +1114,7 @@ namespace QueryTests {
ASSERT_EQUALS( 1 , o.get(0) );
ASSERT_EQUALS( 1 , o.get(1) );
ASSERT_EQUALS( -1 , o.get(2) );
-
+
ASSERT( ! o.descending( 1 ) );
ASSERT( ! o.descending( 1 << 1 ) );
ASSERT( o.descending( 1 << 2 ) );
@@ -1126,12 +1123,100 @@ namespace QueryTests {
}
};
+ namespace proj { // Projection tests
+
+ class T1 {
+ public:
+ void run() {
+
+ Projection m;
+ m.init( BSON( "a" << 1 ) );
+ ASSERT_EQUALS( BSON( "a" << 5 ) , m.transform( BSON( "x" << 1 << "a" << 5 ) ) );
+ }
+ };
+
+ class K1 {
+ public:
+ void run() {
+
+ Projection m;
+ m.init( BSON( "a" << 1 ) );
+
+ scoped_ptr<Projection::KeyOnly> x( m.checkKey( BSON( "a" << 1 ) ) );
+ ASSERT( ! x );
+
+ x.reset( m.checkKey( BSON( "a" << 1 << "_id" << 1 ) ) );
+ ASSERT( x );
+
+ ASSERT_EQUALS( BSON( "a" << 5 << "_id" << 17 ) ,
+ x->hydrate( BSON( "" << 5 << "" << 17 ) ) );
+
+ x.reset( m.checkKey( BSON( "a" << 1 << "x" << 1 << "_id" << 1 ) ) );
+ ASSERT( x );
+
+ ASSERT_EQUALS( BSON( "a" << 5 << "_id" << 17 ) ,
+ x->hydrate( BSON( "" << 5 << "" << 123 << "" << 17 ) ) );
+
+ }
+ };
+
+ class K2 {
+ public:
+ void run() {
+
+ Projection m;
+ m.init( BSON( "a" << 1 << "_id" << 0 ) );
+
+ scoped_ptr<Projection::KeyOnly> x( m.checkKey( BSON( "a" << 1 ) ) );
+ ASSERT( x );
+
+ ASSERT_EQUALS( BSON( "a" << 17 ) ,
+ x->hydrate( BSON( "" << 17 ) ) );
+
+ x.reset( m.checkKey( BSON( "x" << 1 << "a" << 1 << "_id" << 1 ) ) );
+ ASSERT( x );
+
+ ASSERT_EQUALS( BSON( "a" << 123 ) ,
+ x->hydrate( BSON( "" << 5 << "" << 123 << "" << 17 ) ) );
+
+ }
+ };
+
+
+ class K3 {
+ public:
+ void run() {
+
+ {
+ Projection m;
+ m.init( BSON( "a" << 1 << "_id" << 0 ) );
+
+ scoped_ptr<Projection::KeyOnly> x( m.checkKey( BSON( "a" << 1 << "x.a" << 1 ) ) );
+ ASSERT( x );
+ }
+
+
+ {
+ // TODO: this is temporary SERVER-2104
+ Projection m;
+ m.init( BSON( "x.a" << 1 << "_id" << 0 ) );
+
+ scoped_ptr<Projection::KeyOnly> x( m.checkKey( BSON( "a" << 1 << "x.a" << 1 ) ) );
+ ASSERT( ! x );
+ }
+
+ }
+ };
+
+
+ }
+
class All : public Suite {
public:
All() : Suite( "query" ) {
}
- void setupTests(){
+ void setupTests() {
add< CountBasic >();
add< CountQuery >();
add< CountFields >();
@@ -1176,14 +1261,19 @@ namespace QueryTests {
add< FindingStart >();
add< FindingStartPartiallyFull >();
add< WhatsMyUri >();
-
+
add< parsedtests::basic1 >();
-
+
add< queryobjecttests::names1 >();
add< OrderingTest >();
+
+ add< proj::T1 >();
+ add< proj::K1 >();
+ add< proj::K2 >();
+ add< proj::K3 >();
}
} myall;
-
+
} // namespace QueryTests
diff --git a/dbtests/repltests.cpp b/dbtests/repltests.cpp
index a190dc8..c6ffba2 100644
--- a/dbtests/repltests.cpp
+++ b/dbtests/repltests.cpp
@@ -34,13 +34,13 @@ namespace ReplTests {
BSONObj f( const char *s ) {
return fromjson( s );
- }
-
+ }
+
class Base {
dblock lk;
Client::Context _context;
public:
- Base() : _context( ns() ){
+ Base() : _context( ns() ) {
replSettings.master = true;
createOplog();
ensureHaveIdIndex( ns() );
@@ -50,7 +50,8 @@ namespace ReplTests {
replSettings.master = false;
deleteAll( ns() );
deleteAll( cllNS() );
- } catch ( ... ) {
+ }
+ catch ( ... ) {
FAIL( "Exception while cleaning up test" );
}
}
@@ -63,7 +64,7 @@ namespace ReplTests {
}
DBDirectClient *client() const { return &client_; }
BSONObj one( const BSONObj &query = BSONObj() ) const {
- return client()->findOne( ns(), query );
+ return client()->findOne( ns(), query );
}
void checkOne( const BSONObj &o ) const {
check( o, one( o ) );
@@ -78,11 +79,11 @@ namespace ReplTests {
void check( const BSONObj &expected, const BSONObj &got ) const {
if ( expected.woCompare( got ) ) {
out() << "expected: " << expected.toString()
- << ", got: " << got.toString() << endl;
+ << ", got: " << got.toString() << endl;
}
ASSERT_EQUALS( expected , got );
}
- BSONObj oneOp() const {
+ BSONObj oneOp() const {
return client()->findOne( cllNS(), BSONObj() );
}
int count() const {
@@ -131,7 +132,7 @@ namespace ReplTests {
out() << "all for " << ns << endl;
for(; c->ok(); c->advance() ) {
out() << c->current().toString() << endl;
- }
+ }
}
// These deletes don't get logged.
static void deleteAll( const char *ns ) {
@@ -143,7 +144,7 @@ namespace ReplTests {
toDelete.push_back( c->currLoc() );
}
for( vector< DiskLoc >::iterator i = toDelete.begin(); i != toDelete.end(); ++i ) {
- theDataFileMgr.deleteRecord( ns, i->rec(), *i, true );
+ theDataFileMgr.deleteRecord( ns, i->rec(), *i, true );
}
}
static void insert( const BSONObj &o, bool god = false ) {
@@ -163,7 +164,7 @@ namespace ReplTests {
static DBDirectClient client_;
};
DBDirectClient Base::client_;
-
+
class LogBasic : public Base {
public:
void run() {
@@ -172,9 +173,9 @@ namespace ReplTests {
ASSERT_EQUALS( 2, opCount() );
}
};
-
+
namespace Idempotence {
-
+
class Base : public ReplTests::Base {
public:
virtual ~Base() {}
@@ -186,7 +187,7 @@ namespace ReplTests {
applyAllOperations();
check();
ASSERT_EQUALS( nOps, opCount() );
-
+
reset();
applyAllOperations();
check();
@@ -200,7 +201,7 @@ namespace ReplTests {
virtual void check() const = 0;
virtual void reset() const = 0;
};
-
+
class InsertTimestamp : public Base {
public:
void doIt() const {
@@ -221,7 +222,7 @@ namespace ReplTests {
private:
mutable Date_t date_;
};
-
+
class InsertAutoId : public Base {
public:
InsertAutoId() : o_( fromjson( "{\"a\":\"b\"}" ) ) {}
@@ -248,12 +249,12 @@ namespace ReplTests {
checkOne( o_ );
}
};
-
+
class InsertTwo : public Base {
public:
- InsertTwo() :
- o_( fromjson( "{'_id':1,a:'b'}" ) ),
- t_( fromjson( "{'_id':2,c:'d'}" ) ) {}
+ InsertTwo() :
+ o_( fromjson( "{'_id':1,a:'b'}" ) ),
+ t_( fromjson( "{'_id':2,c:'d'}" ) ) {}
void doIt() const {
vector< BSONObj > v;
v.push_back( o_ );
@@ -287,7 +288,7 @@ namespace ReplTests {
deleteAll( ns() );
}
private:
- BSONObj o_;
+ BSONObj o_;
};
class UpdateTimestamp : public Base {
@@ -311,14 +312,14 @@ namespace ReplTests {
private:
mutable Date_t date_;
};
-
+
class UpdateSameField : public Base {
public:
UpdateSameField() :
- q_( fromjson( "{a:'b'}" ) ),
- o1_( wid( "{a:'b'}" ) ),
- o2_( wid( "{a:'b'}" ) ),
- u_( fromjson( "{a:'c'}" ) ){}
+ q_( fromjson( "{a:'b'}" ) ),
+ o1_( wid( "{a:'b'}" ) ),
+ o2_( wid( "{a:'b'}" ) ),
+ u_( fromjson( "{a:'c'}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_ );
}
@@ -334,14 +335,14 @@ namespace ReplTests {
}
private:
BSONObj q_, o1_, o2_, u_;
- };
-
+ };
+
class UpdateSameFieldWithId : public Base {
public:
UpdateSameFieldWithId() :
- o_( fromjson( "{'_id':1,a:'b'}" ) ),
- q_( fromjson( "{a:'b'}" ) ),
- u_( fromjson( "{'_id':1,a:'c'}" ) ){}
+ o_( fromjson( "{'_id':1,a:'b'}" ) ),
+ q_( fromjson( "{a:'b'}" ) ),
+ u_( fromjson( "{'_id':1,a:'c'}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_ );
}
@@ -356,14 +357,14 @@ namespace ReplTests {
insert( fromjson( "{'_id':2,a:'b'}" ) );
}
private:
- BSONObj o_, q_, u_;
- };
+ BSONObj o_, q_, u_;
+ };
class UpdateSameFieldExplicitId : public Base {
public:
UpdateSameFieldExplicitId() :
- o_( fromjson( "{'_id':1,a:'b'}" ) ),
- u_( fromjson( "{'_id':1,a:'c'}" ) ){}
+ o_( fromjson( "{'_id':1,a:'b'}" ) ),
+ u_( fromjson( "{'_id':1,a:'c'}" ) ) {}
void doIt() const {
client()->update( ns(), o_, u_ );
}
@@ -376,46 +377,15 @@ namespace ReplTests {
insert( o_ );
}
protected:
- BSONObj o_, u_;
- };
-
- class UpdateId : public UpdateSameFieldExplicitId {
- public:
- UpdateId() {
- o_ = fromjson( "{'_id':1}" );
- u_ = fromjson( "{'_id':2}" );
- }
- };
-
- class UpdateId2 : public ReplTests::Base {
- public:
- UpdateId2() :
- o_( fromjson( "{'_id':1}" ) ),
- u_( fromjson( "{'_id':2}" ) ){}
- void run() {
- deleteAll( ns() );
- insert( o_ );
- client()->update( ns(), o_, u_ );
- ASSERT_EQUALS( 1, count() );
- checkOne( u_ );
-
- deleteAll( ns() );
- insert( o_ );
- insert( u_ ); // simulate non snapshot replication, then op application
- applyAllOperations();
- ASSERT_EQUALS( 1, count() );
- checkOne( u_ );
- }
- protected:
- BSONObj o_, u_;
+ BSONObj o_, u_;
};
class UpdateDifferentFieldExplicitId : public Base {
public:
UpdateDifferentFieldExplicitId() :
- o_( fromjson( "{'_id':1,a:'b'}" ) ),
- q_( fromjson( "{'_id':1}" ) ),
- u_( fromjson( "{'_id':1,a:'c'}" ) ){}
+ o_( fromjson( "{'_id':1,a:'b'}" ) ),
+ q_( fromjson( "{'_id':1}" ) ),
+ u_( fromjson( "{'_id':1,a:'c'}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_ );
}
@@ -428,28 +398,28 @@ namespace ReplTests {
insert( o_ );
}
protected:
- BSONObj o_, q_, u_;
- };
-
+ BSONObj o_, q_, u_;
+ };
+
class UpsertUpdateNoMods : public UpdateDifferentFieldExplicitId {
void doIt() const {
client()->update( ns(), q_, u_, true );
}
};
-
+
class UpsertInsertNoMods : public InsertAutoId {
void doIt() const {
client()->update( ns(), fromjson( "{a:'c'}" ), o_, true );
}
};
-
+
class UpdateSet : public Base {
public:
UpdateSet() :
- o_( fromjson( "{'_id':1,a:5}" ) ),
- q_( fromjson( "{a:5}" ) ),
- u_( fromjson( "{$set:{a:7}}" ) ),
- ou_( fromjson( "{'_id':1,a:7}" ) ) {}
+ o_( fromjson( "{'_id':1,a:5}" ) ),
+ q_( fromjson( "{a:5}" ) ),
+ u_( fromjson( "{$set:{a:7}}" ) ),
+ ou_( fromjson( "{'_id':1,a:7}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_ );
}
@@ -462,16 +432,16 @@ namespace ReplTests {
insert( o_ );
}
protected:
- BSONObj o_, q_, u_, ou_;
+ BSONObj o_, q_, u_, ou_;
};
-
+
class UpdateInc : public Base {
public:
UpdateInc() :
- o_( fromjson( "{'_id':1,a:5}" ) ),
- q_( fromjson( "{a:5}" ) ),
- u_( fromjson( "{$inc:{a:3}}" ) ),
- ou_( fromjson( "{'_id':1,a:8}" ) ) {}
+ o_( fromjson( "{'_id':1,a:5}" ) ),
+ q_( fromjson( "{a:5}" ) ),
+ u_( fromjson( "{$inc:{a:3}}" ) ),
+ ou_( fromjson( "{'_id':1,a:8}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_ );
}
@@ -484,16 +454,16 @@ namespace ReplTests {
insert( o_ );
}
protected:
- BSONObj o_, q_, u_, ou_;
+ BSONObj o_, q_, u_, ou_;
};
class UpdateInc2 : public Base {
public:
UpdateInc2() :
- o_( fromjson( "{'_id':1,a:5}" ) ),
- q_( fromjson( "{a:5}" ) ),
- u_( fromjson( "{$inc:{a:3},$set:{x:5}}" ) ),
- ou_( fromjson( "{'_id':1,a:8,x:5}" ) ) {}
+ o_( fromjson( "{'_id':1,a:5}" ) ),
+ q_( fromjson( "{a:5}" ) ),
+ u_( fromjson( "{$inc:{a:3},$set:{x:5}}" ) ),
+ ou_( fromjson( "{'_id':1,a:8,x:5}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_ );
}
@@ -506,16 +476,16 @@ namespace ReplTests {
insert( o_ );
}
protected:
- BSONObj o_, q_, u_, ou_;
+ BSONObj o_, q_, u_, ou_;
};
-
+
class IncEmbedded : public Base {
public:
IncEmbedded() :
- o_( fromjson( "{'_id':1,a:{b:3},b:{b:1}}" ) ),
- q_( fromjson( "{'_id':1}" ) ),
- u_( fromjson( "{$inc:{'a.b':1,'b.b':1}}" ) ),
- ou_( fromjson( "{'_id':1,a:{b:4},b:{b:2}}" ) )
+ o_( fromjson( "{'_id':1,a:{b:3},b:{b:1}}" ) ),
+ q_( fromjson( "{'_id':1}" ) ),
+ u_( fromjson( "{$inc:{'a.b':1,'b.b':1}}" ) ),
+ ou_( fromjson( "{'_id':1,a:{b:4},b:{b:2}}" ) )
{}
void doIt() const {
client()->update( ns(), q_, u_ );
@@ -529,16 +499,16 @@ namespace ReplTests {
insert( o_ );
}
protected:
- BSONObj o_, q_, u_, ou_;
+ BSONObj o_, q_, u_, ou_;
};
class IncCreates : public Base {
public:
IncCreates() :
- o_( fromjson( "{'_id':1}" ) ),
- q_( fromjson( "{'_id':1}" ) ),
- u_( fromjson( "{$inc:{'a':1}}" ) ),
- ou_( fromjson( "{'_id':1,a:1}") )
+ o_( fromjson( "{'_id':1}" ) ),
+ q_( fromjson( "{'_id':1}" ) ),
+ u_( fromjson( "{$inc:{'a':1}}" ) ),
+ ou_( fromjson( "{'_id':1,a:1}") )
{}
void doIt() const {
client()->update( ns(), q_, u_ );
@@ -552,16 +522,16 @@ namespace ReplTests {
insert( o_ );
}
protected:
- BSONObj o_, q_, u_, ou_;
+ BSONObj o_, q_, u_, ou_;
};
class UpsertInsertIdMod : public Base {
public:
UpsertInsertIdMod() :
- q_( fromjson( "{'_id':5,a:4}" ) ),
- u_( fromjson( "{$inc:{a:3}}" ) ),
- ou_( fromjson( "{'_id':5,a:7}" ) ) {}
+ q_( fromjson( "{'_id':5,a:4}" ) ),
+ u_( fromjson( "{$inc:{a:3}}" ) ),
+ ou_( fromjson( "{'_id':5,a:7}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_, true );
}
@@ -573,15 +543,15 @@ namespace ReplTests {
deleteAll( ns() );
}
protected:
- BSONObj q_, u_, ou_;
+ BSONObj q_, u_, ou_;
};
-
+
class UpsertInsertSet : public Base {
public:
UpsertInsertSet() :
- q_( fromjson( "{a:5}" ) ),
- u_( fromjson( "{$set:{a:7}}" ) ),
- ou_( fromjson( "{a:7}" ) ) {}
+ q_( fromjson( "{a:5}" ) ),
+ u_( fromjson( "{$set:{a:7}}" ) ),
+ ou_( fromjson( "{a:7}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_, true );
}
@@ -594,15 +564,15 @@ namespace ReplTests {
insert( fromjson( "{'_id':7,a:7}" ) );
}
protected:
- BSONObj o_, q_, u_, ou_;
+ BSONObj o_, q_, u_, ou_;
};
-
+
class UpsertInsertInc : public Base {
public:
UpsertInsertInc() :
- q_( fromjson( "{a:5}" ) ),
- u_( fromjson( "{$inc:{a:3}}" ) ),
- ou_( fromjson( "{a:8}" ) ) {}
+ q_( fromjson( "{a:5}" ) ),
+ u_( fromjson( "{$inc:{a:3}}" ) ),
+ ou_( fromjson( "{a:8}" ) ) {}
void doIt() const {
client()->update( ns(), q_, u_, true );
}
@@ -614,38 +584,38 @@ namespace ReplTests {
deleteAll( ns() );
}
protected:
- BSONObj o_, q_, u_, ou_;
+ BSONObj o_, q_, u_, ou_;
};
-
+
class MultiInc : public Base {
public:
-
+
string s() const {
stringstream ss;
auto_ptr<DBClientCursor> cc = client()->query( ns() , Query().sort( BSON( "_id" << 1 ) ) );
bool first = true;
- while ( cc->more() ){
+ while ( cc->more() ) {
if ( first ) first = false;
else ss << ",";
-
+
BSONObj o = cc->next();
ss << o["x"].numberInt();
}
return ss.str();
}
-
+
void doIt() const {
client()->insert( ns(), BSON( "_id" << 1 << "x" << 1 ) );
client()->insert( ns(), BSON( "_id" << 2 << "x" << 5 ) );
-
+
ASSERT_EQUALS( "1,5" , s() );
-
+
client()->update( ns() , BSON( "_id" << 1 ) , BSON( "$inc" << BSON( "x" << 1 ) ) );
ASSERT_EQUALS( "2,5" , s() );
-
+
client()->update( ns() , BSONObj() , BSON( "$inc" << BSON( "x" << 1 ) ) );
ASSERT_EQUALS( "3,5" , s() );
-
+
client()->update( ns() , BSONObj() , BSON( "$inc" << BSON( "x" << 1 ) ) , false , true );
check();
}
@@ -653,18 +623,18 @@ namespace ReplTests {
void check() const {
ASSERT_EQUALS( "4,6" , s() );
}
-
+
void reset() const {
deleteAll( ns() );
}
};
-
+
class UpdateWithoutPreexistingId : public Base {
public:
UpdateWithoutPreexistingId() :
- o_( fromjson( "{a:5}" ) ),
- u_( fromjson( "{a:5}" ) ),
- ot_( fromjson( "{b:4}" ) ) {}
+ o_( fromjson( "{a:5}" ) ),
+ u_( fromjson( "{a:5}" ) ),
+ ot_( fromjson( "{b:4}" ) ) {}
void doIt() const {
client()->update( ns(), o_, u_ );
}
@@ -679,15 +649,15 @@ namespace ReplTests {
insert( o_, true );
}
protected:
- BSONObj o_, u_, ot_;
- };
-
+ BSONObj o_, u_, ot_;
+ };
+
class Remove : public Base {
public:
Remove() :
- o1_( f( "{\"_id\":\"010101010101010101010101\",\"a\":\"b\"}" ) ),
- o2_( f( "{\"_id\":\"010101010101010101010102\",\"a\":\"b\"}" ) ),
- q_( f( "{\"a\":\"b\"}" ) ) {}
+ o1_( f( "{\"_id\":\"010101010101010101010101\",\"a\":\"b\"}" ) ),
+ o2_( f( "{\"_id\":\"010101010101010101010102\",\"a\":\"b\"}" ) ),
+ q_( f( "{\"a\":\"b\"}" ) ) {}
void doIt() const {
client()->remove( ns(), q_ );
}
@@ -700,23 +670,23 @@ namespace ReplTests {
insert( o2_ );
}
protected:
- BSONObj o1_, o2_, q_;
+ BSONObj o1_, o2_, q_;
};
-
+
class RemoveOne : public Remove {
void doIt() const {
client()->remove( ns(), q_, true );
- }
+ }
void check() const {
ASSERT_EQUALS( 1, count() );
}
};
-
+
class FailingUpdate : public Base {
public:
FailingUpdate() :
- o_( fromjson( "{'_id':1,a:'b'}" ) ),
- u_( fromjson( "{'_id':1,c:'d'}" ) ) {}
+ o_( fromjson( "{'_id':1,a:'b'}" ) ),
+ u_( fromjson( "{'_id':1,c:'d'}" ) ) {}
void doIt() const {
client()->update( ns(), o_, u_ );
client()->insert( ns(), o_ );
@@ -731,7 +701,7 @@ namespace ReplTests {
protected:
BSONObj o_, u_;
};
-
+
class SetNumToStr : public Base {
public:
void doIt() const {
@@ -746,7 +716,7 @@ namespace ReplTests {
insert( BSON( "_id" << 0 << "a" << 4.0 ) );
}
};
-
+
class Push : public Base {
public:
void doIt() const {
@@ -760,9 +730,9 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4]}" ) );
- }
+ }
};
-
+
class PushUpsert : public Base {
public:
void doIt() const {
@@ -776,7 +746,7 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4]}" ) );
- }
+ }
};
class MultiPush : public Base {
@@ -792,7 +762,7 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4]}" ) );
- }
+ }
};
class EmptyPush : public Base {
@@ -808,13 +778,13 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0}" ) );
- }
+ }
};
class PushAll : public Base {
public:
void doIt() const {
- client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pushAll:{a:[5.0,6.0]}}" ) );
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pushAll:{a:[5.0,6.0]}}" ) );
}
using ReplTests::Base::check;
void check() const {
@@ -824,13 +794,13 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4]}" ) );
- }
+ }
};
-
+
class PushAllUpsert : public Base {
public:
void doIt() const {
- client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pushAll:{a:[5.0,6.0]}}" ), true );
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pushAll:{a:[5.0,6.0]}}" ), true );
}
using ReplTests::Base::check;
void check() const {
@@ -840,7 +810,7 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4]}" ) );
- }
+ }
};
class EmptyPushAll : public Base {
@@ -856,7 +826,7 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0}" ) );
- }
+ }
};
class Pull : public Base {
@@ -872,9 +842,9 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4,5]}" ) );
- }
+ }
};
-
+
class PullNothing : public Base {
public:
void doIt() const {
@@ -888,13 +858,13 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4,5]}" ) );
- }
+ }
};
-
+
class PullAll : public Base {
public:
void doIt() const {
- client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pullAll:{a:[4,5]}}" ) );
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$pullAll:{a:[4,5]}}" ) );
}
using ReplTests::Base::check;
void check() const {
@@ -904,7 +874,7 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4,5,6]}" ) );
- }
+ }
};
class Pop : public Base {
@@ -920,7 +890,7 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4,5,6]}" ) );
- }
+ }
};
class PopReverse : public Base {
@@ -936,7 +906,7 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:[4,5,6]}" ) );
- }
+ }
};
class BitOp : public Base {
@@ -952,13 +922,78 @@ namespace ReplTests {
void reset() const {
deleteAll( ns() );
insert( fromjson( "{'_id':0,a:3}" ) );
- }
+ }
+ };
+
+ class Rename : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$rename:{a:'b'}}" ) );
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$set:{a:50}}" ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( BSON( "_id" << 0 << "a" << 50 << "b" << 3 ) , one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,a:3}" ) );
+ }
+ };
+
+ class RenameReplace : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$rename:{a:'b'}}" ) );
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$set:{a:50}}" ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( BSON( "_id" << 0 << "a" << 50 << "b" << 3 ) , one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,a:3,b:100}" ) );
+ }
+ };
+
+ class RenameOverwrite : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$rename:{a:'b'}}" ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( BSON( "_id" << 0 << "b" << 3 << "z" << 1 ) , one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,z:1,a:3}" ) );
+ }
+ };
+
+ class NoRename : public Base {
+ public:
+ void doIt() const {
+ client()->update( ns(), BSON( "_id" << 0 ), fromjson( "{$rename:{c:'b'},$set:{z:1}}" ) );
+ }
+ using ReplTests::Base::check;
+ void check() const {
+ ASSERT_EQUALS( 1, count() );
+ check( BSON( "_id" << 0 << "a" << 3 << "z" << 1 ) , one( fromjson( "{'_id':0}" ) ) );
+ }
+ void reset() const {
+ deleteAll( ns() );
+ insert( fromjson( "{'_id':0,a:3}" ) );
+ }
};
-
} // namespace Idempotence
-
+
class DeleteOpIsIdBased : public Base {
public:
void run() {
@@ -968,21 +1003,21 @@ namespace ReplTests {
client()->remove( ns(), BSON( "a" << 10 ) );
ASSERT_EQUALS( 1U, client()->count( ns(), BSONObj() ) );
insert( BSON( "_id" << 0 << "a" << 11 ) );
- insert( BSON( "_id" << 2 << "a" << 10 ) );
+ insert( BSON( "_id" << 2 << "a" << 10 ) );
insert( BSON( "_id" << 3 << "a" << 10 ) );
-
+
applyAllOperations();
ASSERT_EQUALS( 2U, client()->count( ns(), BSONObj() ) );
ASSERT( !one( BSON( "_id" << 1 ) ).isEmpty() );
ASSERT( !one( BSON( "_id" << 2 ) ).isEmpty() );
}
};
-
+
class DbIdsTest {
public:
void run() {
Client::Context ctx( "unittests.repltest.DbIdsTest" );
-
+
s_.reset( new DbIds( "local.temp.DbIdsTest" ) );
s_->reset();
check( false, false, false );
@@ -991,7 +1026,7 @@ namespace ReplTests {
check( true, false, false );
s_->set( "a", BSON( "_id" << 4 ), false );
check( false, false, false );
-
+
s_->set( "b", BSON( "_id" << 4 ), true );
check( false, true, false );
s_->set( "b", BSON( "_id" << 4 ), false );
@@ -1009,7 +1044,7 @@ namespace ReplTests {
s_->reset();
check( false, false, false );
-
+
s_->set( "a", BSON( "_id" << 4 ), true );
s_->set( "a", BSON( "_id" << 4 ), true );
check( true, false, false );
@@ -1020,17 +1055,17 @@ namespace ReplTests {
void check( bool one, bool two, bool three ) {
ASSERT_EQUALS( one, s_->get( "a", BSON( "_id" << 4 ) ) );
ASSERT_EQUALS( two, s_->get( "b", BSON( "_id" << 4 ) ) );
- ASSERT_EQUALS( three, s_->get( "a", BSON( "_id" << 5 ) ) );
+ ASSERT_EQUALS( three, s_->get( "a", BSON( "_id" << 5 ) ) );
}
dblock lk_;
auto_ptr< DbIds > s_;
};
-
+
class MemIdsTest {
public:
void run() {
int n = sizeof( BSONObj ) + BSON( "_id" << 4 ).objsize();
-
+
s_.reset();
ASSERT_EQUALS( 0, s_.roughSize() );
ASSERT( !s_.get( "a", BSON( "_id" << 4 ) ) );
@@ -1057,7 +1092,7 @@ namespace ReplTests {
public:
void run() {
Client::Context ctx( "unittests.repltests.IdTrackerTest" );
-
+
ASSERT( s_.inMem() );
s_.reset( 4 * sizeof( BSONObj ) - 1 );
s_.haveId( "a", BSON( "_id" << 0 ), true );
@@ -1069,34 +1104,34 @@ namespace ReplTests {
s_.mayUpgradeStorage();
ASSERT( !s_.inMem() );
check();
-
+
s_.haveId( "a", BSON( "_id" << 1 ), false );
ASSERT( !s_.haveId( "a", BSON( "_id" << 1 ) ) );
s_.haveId( "a", BSON( "_id" << 1 ), true );
check();
- ASSERT( !s_.inMem() );
-
+ ASSERT( !s_.inMem() );
+
s_.reset( 4 * sizeof( BSONObj ) - 1 );
s_.mayUpgradeStorage();
- ASSERT( s_.inMem() );
+ ASSERT( s_.inMem() );
}
private:
void check() {
ASSERT( s_.haveId( "a", BSON( "_id" << 0 ) ) );
ASSERT( s_.haveId( "a", BSON( "_id" << 1 ) ) );
ASSERT( s_.haveId( "b", BSON( "_id" << 0 ) ) );
- ASSERT( s_.haveModId( "b", BSON( "_id" << 0 ) ) );
+ ASSERT( s_.haveModId( "b", BSON( "_id" << 0 ) ) );
}
dblock lk_;
IdTracker s_;
};
-
+
class All : public Suite {
public:
- All() : Suite( "repl" ){
+ All() : Suite( "repl" ) {
}
-
- void setupTests(){
+
+ void setupTests() {
add< LogBasic >();
add< Idempotence::InsertTimestamp >();
add< Idempotence::InsertAutoId >();
@@ -1107,8 +1142,6 @@ namespace ReplTests {
add< Idempotence::UpdateSameField >();
add< Idempotence::UpdateSameFieldWithId >();
add< Idempotence::UpdateSameFieldExplicitId >();
- add< Idempotence::UpdateId >();
- add< Idempotence::UpdateId2 >();
add< Idempotence::UpdateDifferentFieldExplicitId >();
add< Idempotence::UpsertUpdateNoMods >();
add< Idempotence::UpsertInsertNoMods >();
@@ -1140,12 +1173,16 @@ namespace ReplTests {
add< Idempotence::Pop >();
add< Idempotence::PopReverse >();
add< Idempotence::BitOp >();
+ add< Idempotence::Rename >();
+ add< Idempotence::RenameReplace >();
+ add< Idempotence::RenameOverwrite >();
+ add< Idempotence::NoRename >();
add< DeleteOpIsIdBased >();
add< DbIdsTest >();
add< MemIdsTest >();
add< IdTrackerTest >();
}
} myall;
-
+
} // namespace ReplTests
diff --git a/dbtests/sharding.cpp b/dbtests/sharding.cpp
index 2473366..19edd55 100644
--- a/dbtests/sharding.cpp
+++ b/dbtests/sharding.cpp
@@ -27,17 +27,17 @@ namespace ShardingTests {
namespace serverandquerytests {
class test1 {
public:
- void run(){
+ void run() {
ServerAndQuery a( "foo:1" , BSON( "a" << GT << 0 << LTE << 100 ) );
ServerAndQuery b( "foo:1" , BSON( "a" << GT << 200 << LTE << 1000 ) );
-
+
ASSERT( a < b );
ASSERT( ! ( b < a ) );
set<ServerAndQuery> s;
s.insert( a );
s.insert( b );
-
+
ASSERT_EQUALS( (unsigned int)2 , s.size() );
}
};
@@ -45,12 +45,12 @@ namespace ShardingTests {
class All : public Suite {
public:
- All() : Suite( "sharding" ){
+ All() : Suite( "sharding" ) {
}
- void setupTests(){
+ void setupTests() {
add< serverandquerytests::test1 >();
}
} myall;
-
+
}
diff --git a/dbtests/socktests.cpp b/dbtests/socktests.cpp
index 267b1d6..5cd42f5 100644
--- a/dbtests/socktests.cpp
+++ b/dbtests/socktests.cpp
@@ -19,7 +19,6 @@
#include "pch.h"
#include "../util/sock.h"
-
#include "dbtests.h"
namespace SockTests {
@@ -30,16 +29,20 @@ namespace SockTests {
ASSERT_EQUALS( "127.0.0.1", hostbyname( "localhost" ) );
ASSERT_EQUALS( "127.0.0.1", hostbyname( "127.0.0.1" ) );
// ASSERT_EQUALS( "::1", hostbyname( "::1" ) ); // IPv6 disabled at runtime by default.
+
+ HostAndPort h("asdfasdfasdf_no_such_host");
+ // this fails uncomment when fixed.
+ ASSERT( !h.isSelf() );
}
};
-
+
class All : public Suite {
public:
- All() : Suite( "sock" ){}
- void setupTests(){
+ All() : Suite( "sock" ) {}
+ void setupTests() {
add< HostByName >();
}
} myall;
-
+
} // namespace SockTests
diff --git a/dbtests/spin_lock_test.cpp b/dbtests/spin_lock_test.cpp
index d053d61..01eb7b3 100644
--- a/dbtests/spin_lock_test.cpp
+++ b/dbtests/spin_lock_test.cpp
@@ -26,26 +26,26 @@ namespace {
using mongo::SpinLock;
- class LockTester{
+ class LockTester {
public:
LockTester( SpinLock* spin, int* counter )
- : _spin(spin), _counter(counter), _requests(0){}
+ : _spin(spin), _counter(counter), _requests(0) {}
- ~LockTester(){
+ ~LockTester() {
delete _t;
}
- void start( int increments ){
- _t = new boost::thread( boost::bind(&LockTester::test, this, increments) );
+ void start( int increments ) {
+ _t = new boost::thread( boost::bind(&LockTester::test, this, increments) );
}
- void join(){
+ void join() {
if ( _t ) _t->join();
}
- int requests() const{
- return _requests;
- }
+ int requests() const {
+ return _requests;
+ }
private:
SpinLock* _spin; // not owned here
@@ -53,7 +53,7 @@ namespace {
int _requests;
boost::thread* _t;
- void test( int increments ){
+ void test( int increments ) {
while ( increments-- > 0 ) {
_spin->lock();
++(*_counter);
@@ -61,14 +61,14 @@ namespace {
_spin->unlock();
}
}
-
+
LockTester( LockTester& );
LockTester& operator=( LockTester& );
};
- class ConcurrentIncs{
+ class ConcurrentIncs {
public:
- void run(){
+ void run() {
#if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)
@@ -77,37 +77,37 @@ namespace {
const int threads = 64;
const int incs = 10000;
- LockTester* testers[threads];
-
- for ( int i = 0; i < threads; i++ ){
- testers[i] = new LockTester( &spin, &counter );
- }
- for ( int i = 0; i < threads; i++ ){
- testers[i]->start( incs );
- }
- for ( int i = 0; i < threads; i++ ){
- testers[i]->join();
- ASSERT_EQUALS( testers[i]->requests(), incs );
- delete testers[i];
- }
-
- ASSERT_EQUALS( counter, threads*incs );
+ LockTester* testers[threads];
+
+ for ( int i = 0; i < threads; i++ ) {
+ testers[i] = new LockTester( &spin, &counter );
+ }
+ for ( int i = 0; i < threads; i++ ) {
+ testers[i]->start( incs );
+ }
+ for ( int i = 0; i < threads; i++ ) {
+ testers[i]->join();
+ ASSERT_EQUALS( testers[i]->requests(), incs );
+ delete testers[i];
+ }
+
+ ASSERT_EQUALS( counter, threads*incs );
#else
- // WARNING "TODO Missing spin lock in this platform."
- ASSERT( true );
+ // WARNING "TODO Missing spin lock in this platform."
+ ASSERT( true );
+
-
#endif
}
};
- class SpinLockSuite : public Suite{
+ class SpinLockSuite : public Suite {
public:
- SpinLockSuite() : Suite( "spinlock" ){}
+ SpinLockSuite() : Suite( "spinlock" ) {}
- void setupTests(){
+ void setupTests() {
add< ConcurrentIncs >();
}
} spinLockSuite;
diff --git a/dbtests/test.vcproj b/dbtests/test.vcproj
deleted file mode 100644
index c297d85..0000000
--- a/dbtests/test.vcproj
+++ /dev/null
@@ -1,1453 +0,0 @@
-<?xml version="1.0" encoding="Windows-1252"?>
-<VisualStudioProject
- ProjectType="Visual C++"
- Version="9.00"
- Name="test"
- ProjectGUID="{215B2D68-0A70-4D10-8E75-B33010C62A91}"
- RootNamespace="dbtests"
- Keyword="Win32Proj"
- TargetFrameworkVersion="196613"
- >
- <Platforms>
- <Platform
- Name="Win32"
- />
- </Platforms>
- <ToolFiles>
- </ToolFiles>
- <Configurations>
- <Configuration
- Name="Debug|Win32"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(ConfigurationName)"
- ConfigurationType="1"
- UseOfMFC="0"
- UseOfATL="0"
- CharacterSet="1"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- Optimization="0"
- AdditionalIncludeDirectories="&quot;c:\program files\boost\latest&quot;;..\..\js\src;&quot;..\pcre-7.4&quot;;c:\boost;\boost"
- PreprocessorDefinitions="MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;WIN32;_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;PCRE_STATIC"
- MinimalRebuild="true"
- BasicRuntimeChecks="3"
- RuntimeLibrary="3"
- UsePrecompiledHeader="2"
- PrecompiledHeaderThrough="pch.h"
- WarningLevel="3"
- Detect64BitPortabilityProblems="false"
- DebugInformationFormat="4"
- DisableSpecificWarnings="4355;4800"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- AdditionalDependencies="ws2_32.lib Psapi.lib"
- LinkIncremental="2"
- AdditionalLibraryDirectories="&quot;c:\Program Files\boost\latest\lib&quot;;c:\boost\lib;\boost\lib"
- IgnoreAllDefaultLibraries="false"
- IgnoreDefaultLibraryNames=""
- GenerateDebugInformation="true"
- SubSystem="1"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCManifestTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCAppVerifierTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- <Configuration
- Name="Release|Win32"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(ConfigurationName)"
- ConfigurationType="1"
- CharacterSet="1"
- WholeProgramOptimization="1"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- Optimization="2"
- EnableIntrinsicFunctions="true"
- AdditionalIncludeDirectories="&quot;c:\program files\boost\latest&quot;;..\..\js\src;&quot;..\pcre-7.4&quot;;c:\boost;\boost"
- PreprocessorDefinitions="MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;WIN32;NDEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;PCRE_STATIC"
- RuntimeLibrary="0"
- EnableFunctionLevelLinking="true"
- UsePrecompiledHeader="2"
- PrecompiledHeaderThrough="pch.h"
- WarningLevel="3"
- DebugInformationFormat="3"
- DisableSpecificWarnings="4355;4800"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- AdditionalDependencies="ws2_32.lib psapi.lib"
- LinkIncremental="1"
- AdditionalLibraryDirectories="&quot;c:\Program Files\boost\latest\lib&quot;;c:\boost\lib;\boost\lib"
- GenerateDebugInformation="true"
- SubSystem="1"
- OptimizeReferences="2"
- EnableCOMDATFolding="2"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCManifestTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCAppVerifierTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- </Configurations>
- <References>
- </References>
- <Files>
- <Filter
- Name="misc and third party"
- >
- <File
- RelativePath="..\..\boostw\boost_1_34_1\boost\config\auto_link.hpp"
- >
- </File>
- <File
- RelativePath="..\db\db.rc"
- >
- </File>
- <File
- RelativePath="..\..\js\src\js.lib"
- >
- </File>
- <File
- RelativePath="..\pcre-7.4\pcrecpp.cc"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcrecpp.h"
- >
- </File>
- <File
- RelativePath="..\SConstruct"
- >
- </File>
- <File
- RelativePath="..\targetver.h"
- >
- </File>
- <File
- RelativePath="..\..\boostw\boost_1_34_1\boost\version.hpp"
- >
- </File>
- <Filter
- Name="pcre"
- >
- <File
- RelativePath="..\pcre-7.4\config.h"
- >
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre.h"
- >
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_chartables.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_compile.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_config.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_dfa_exec.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_exec.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_fullinfo.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_get.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_globals.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_info.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_maketables.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_newline.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_ord2utf8.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_refcount.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_scanner.cc"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_stringpiece.cc"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_study.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_tables.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_try_flipped.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_ucp_searchfuncs.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_valid_utf8.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_version.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcre_xclass.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\pcre-7.4\pcreposix.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- </Filter>
- </Filter>
- <Filter
- Name="storage related"
- >
- <File
- RelativePath="..\db\rec.h"
- >
- </File>
- <File
- RelativePath="..\db\reccache.h"
- >
- </File>
- <File
- RelativePath="..\db\reci.h"
- >
- </File>
- <File
- RelativePath="..\db\recstore.h"
- >
- </File>
- <File
- RelativePath="..\db\storage.cpp"
- >
- </File>
- <File
- RelativePath="..\db\storage.h"
- >
- </File>
- </Filter>
- <Filter
- Name="client"
- >
- <File
- RelativePath="..\client\connpool.cpp"
- >
- </File>
- <File
- RelativePath="..\client\connpool.h"
- >
- </File>
- <File
- RelativePath="..\client\dbclient.cpp"
- >
- </File>
- <File
- RelativePath="..\client\dbclient.h"
- >
- </File>
- <File
- RelativePath="..\client\dbclientcursor.cpp"
- >
- </File>
- <File
- RelativePath="..\client\model.h"
- >
- </File>
- <File
- RelativePath="..\client\syncclusterconnection.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="db"
- >
- <File
- RelativePath="..\pch.cpp"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="1"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="1"
- />
- </FileConfiguration>
- </File>
- <Filter
- Name="cpp"
- Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
- UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
- >
- <File
- RelativePath="..\db\client.cpp"
- >
- </File>
- <File
- RelativePath="..\db\clientcursor.cpp"
- >
- </File>
- <File
- RelativePath="..\db\cloner.cpp"
- >
- </File>
- <File
- RelativePath="..\db\commands.cpp"
- >
- </File>
- <File
- RelativePath="..\db\common.cpp"
- >
- </File>
- <File
- RelativePath="..\db\cursor.cpp"
- >
- </File>
- <File
- RelativePath="..\db\database.cpp"
- >
- </File>
- <File
- RelativePath="..\db\dbcommands.cpp"
- >
- </File>
- <File
- RelativePath="..\db\dbeval.cpp"
- >
- </File>
- <File
- RelativePath="..\db\dbhelpers.cpp"
- >
- </File>
- <File
- RelativePath="..\db\dbwebserver.cpp"
- >
- </File>
- <File
- RelativePath="..\db\extsort.cpp"
- >
- </File>
- <File
- RelativePath="..\db\index.cpp"
- >
- </File>
- <File
- RelativePath="..\db\index_geo2d.cpp"
- >
- </File>
- <File
- RelativePath="..\db\indexkey.cpp"
- >
- </File>
- <File
- RelativePath="..\db\instance.cpp"
- >
- </File>
- <File
- RelativePath="..\db\introspect.cpp"
- >
- </File>
- <File
- RelativePath="..\db\jsobj.cpp"
- >
- </File>
- <File
- RelativePath="..\db\json.cpp"
- >
- </File>
- <File
- RelativePath="..\db\lasterror.cpp"
- >
- </File>
- <File
- RelativePath="..\db\matcher.cpp"
- >
- </File>
- <File
- RelativePath="..\util\mmap_win.cpp"
- >
- </File>
- <File
- RelativePath="..\db\namespace.cpp"
- >
- </File>
- <File
- RelativePath="..\db\nonce.cpp"
- >
- </File>
- <File
- RelativePath="..\db\pdfile.cpp"
- >
- </File>
- <File
- RelativePath="..\db\query.cpp"
- >
- </File>
- <File
- RelativePath="..\db\queryoptimizer.cpp"
- >
- </File>
- <File
- RelativePath="..\util\ramstore.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl.cpp"
- >
- </File>
- <File
- RelativePath="..\db\security.cpp"
- >
- </File>
- <File
- RelativePath="..\db\security_commands.cpp"
- >
- </File>
- <File
- RelativePath="..\db\tests.cpp"
- >
- </File>
- <File
- RelativePath="..\db\update.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="h"
- >
- <File
- RelativePath="..\db\clientcursor.h"
- >
- </File>
- <File
- RelativePath="..\db\cmdline.cpp"
- >
- </File>
- <File
- RelativePath="..\db\cmdline.h"
- >
- </File>
- <File
- RelativePath="..\db\commands.h"
- >
- </File>
- <File
- RelativePath="..\db\concurrency.h"
- >
- </File>
- <File
- RelativePath="..\db\curop.h"
- >
- </File>
- <File
- RelativePath="..\db\cursor.h"
- >
- </File>
- <File
- RelativePath="..\db\database.h"
- >
- </File>
- <File
- RelativePath="..\db\db.h"
- >
- </File>
- <File
- RelativePath="..\db\dbhelpers.h"
- >
- </File>
- <File
- RelativePath="..\db\dbinfo.h"
- >
- </File>
- <File
- RelativePath="..\db\dbmessage.h"
- >
- </File>
- <File
- RelativePath="..\db\diskloc.h"
- >
- </File>
- <File
- RelativePath="..\db\extsort.h"
- >
- </File>
- <File
- RelativePath="..\db\introspect.h"
- >
- </File>
- <File
- RelativePath="..\db\jsobj.h"
- >
- </File>
- <File
- RelativePath="..\db\json.h"
- >
- </File>
- <File
- RelativePath="..\db\matcher.h"
- >
- </File>
- <File
- RelativePath="..\db\matcher_covered.cpp"
- >
- </File>
- <File
- RelativePath="..\grid\message.h"
- >
- </File>
- <File
- RelativePath="..\db\minilex.h"
- >
- </File>
- <File
- RelativePath="..\db\namespace.h"
- >
- </File>
- <File
- RelativePath="..\db\oplog.cpp"
- >
- </File>
- <File
- RelativePath="..\pch.h"
- >
- </File>
- <File
- RelativePath="..\db\pdfile.h"
- >
- </File>
- <File
- RelativePath="..\grid\protocol.h"
- >
- </File>
- <File
- RelativePath="..\db\query.h"
- >
- </File>
- <File
- RelativePath="..\db\queryoptimizer.h"
- >
- </File>
- <File
- RelativePath="..\db\queryutil.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl.h"
- >
- </File>
- <File
- RelativePath="..\db\repl_block.cpp"
- >
- </File>
- <File
- RelativePath="..\db\replset.h"
- >
- </File>
- <File
- RelativePath="..\db\resource.h"
- >
- </File>
- <File
- RelativePath="..\db\scanandorder.h"
- >
- </File>
- <File
- RelativePath="..\db\security.h"
- >
- </File>
- </Filter>
- </Filter>
- <Filter
- Name="util"
- >
- <File
- RelativePath="..\util\builder.h"
- >
- </File>
- <File
- RelativePath="..\util\debug_util.cpp"
- >
- </File>
- <File
- RelativePath="..\util\file.h"
- >
- </File>
- <File
- RelativePath="..\util\goodies.h"
- >
- </File>
- <File
- RelativePath="..\util\hashtab.h"
- >
- </File>
- <File
- RelativePath="..\db\lasterror.h"
- >
- </File>
- <File
- RelativePath="..\util\log.h"
- >
- </File>
- <File
- RelativePath="..\util\lruishmap.h"
- >
- </File>
- <File
- RelativePath="..\util\md5.h"
- >
- </File>
- <File
- RelativePath="..\util\md5.hpp"
- >
- </File>
- <File
- RelativePath="..\util\miniwebserver.h"
- >
- </File>
- <File
- RelativePath="..\util\mmap.h"
- >
- </File>
- <File
- RelativePath="..\util\sock.h"
- >
- </File>
- <File
- RelativePath="..\util\unittest.h"
- >
- </File>
- <Filter
- Name="cpp"
- >
- <File
- RelativePath="..\util\assert_util.cpp"
- >
- </File>
- <File
- RelativePath="..\util\background.cpp"
- >
- </File>
- <File
- RelativePath="..\util\base64.cpp"
- >
- </File>
- <File
- RelativePath="..\util\httpclient.cpp"
- >
- </File>
- <File
- RelativePath="..\util\md5.c"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- PrecompiledHeaderThrough=""
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\util\md5main.cpp"
- >
- </File>
- <File
- RelativePath="..\util\message.cpp"
- >
- </File>
- <File
- RelativePath="..\util\message_server_port.cpp"
- >
- </File>
- <File
- RelativePath="..\util\miniwebserver.cpp"
- >
- </File>
- <File
- RelativePath="..\util\mmap.cpp"
- >
- </File>
- <File
- RelativePath="..\util\ntservice.cpp"
- >
- </File>
- <File
- RelativePath="..\util\processinfo_win32.cpp"
- >
- </File>
- <File
- RelativePath="..\util\sock.cpp"
- >
- </File>
- <File
- RelativePath="..\util\thread_pool.cpp"
- >
- </File>
- <File
- RelativePath="..\util\util.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="concurrency"
- >
- <File
- RelativePath="..\util\concurrency\list.h"
- >
- </File>
- <File
- RelativePath="..\util\concurrency\msg.h"
- >
- </File>
- <File
- RelativePath="..\util\concurrency\task.cpp"
- >
- </File>
- <File
- RelativePath="..\util\concurrency\task.h"
- >
- </File>
- <File
- RelativePath="..\util\concurrency\value.h"
- >
- </File>
- <File
- RelativePath="..\util\concurrency\vars.cpp"
- >
- </File>
- </Filter>
- </Filter>
- <Filter
- Name="shard"
- >
- <File
- RelativePath="..\s\d_logic.cpp"
- >
- </File>
- <File
- RelativePath="..\s\d_util.cpp"
- >
- </File>
- <File
- RelativePath="..\s\shardconnection.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="scripting"
- >
- <File
- RelativePath="..\scripting\engine.cpp"
- >
- </File>
- <File
- RelativePath="..\scripting\engine_spidermonkey.cpp"
- >
- </File>
- <File
- RelativePath="..\shell\mongo_vstudio.cpp"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- <FileConfiguration
- Name="Release|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- UsePrecompiledHeader="0"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath="..\scripting\utils.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="dbtests"
- >
- <File
- RelativePath=".\basictests.cpp"
- >
- </File>
- <File
- RelativePath=".\btreetests.cpp"
- >
- </File>
- <File
- RelativePath=".\clienttests.cpp"
- >
- </File>
- <File
- RelativePath=".\cursortests.cpp"
- >
- </File>
- <File
- RelativePath=".\dbtests.cpp"
- >
- </File>
- <File
- RelativePath=".\framework.cpp"
- >
- </File>
- <File
- RelativePath=".\jsobjtests.cpp"
- >
- </File>
- <File
- RelativePath=".\jsontests.cpp"
- >
- </File>
- <File
- RelativePath=".\jstests.cpp"
- >
- </File>
- <File
- RelativePath=".\matchertests.cpp"
- >
- </File>
- <File
- RelativePath=".\namespacetests.cpp"
- >
- </File>
- <File
- RelativePath=".\pairingtests.cpp"
- >
- </File>
- <File
- RelativePath=".\pdfiletests.cpp"
- >
- </File>
- <File
- RelativePath=".\queryoptimizertests.cpp"
- >
- </File>
- <File
- RelativePath=".\querytests.cpp"
- >
- </File>
- <File
- RelativePath=".\repltests.cpp"
- >
- </File>
- <File
- RelativePath=".\socktests.cpp"
- >
- </File>
- <File
- RelativePath=".\threadedtests.cpp"
- >
- <FileConfiguration
- Name="Debug|Win32"
- >
- <Tool
- Name="VCCLCompilerTool"
- DisableSpecificWarnings="4180"
- />
- </FileConfiguration>
- </File>
- <File
- RelativePath=".\updatetests.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="stats"
- >
- <File
- RelativePath="..\db\stats\counters.cpp"
- >
- </File>
- <File
- RelativePath="..\db\stats\snapshots.cpp"
- >
- </File>
- <File
- RelativePath="..\db\stats\top.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="replsets"
- >
- <File
- RelativePath="..\db\repl\consensus.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl\health.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl\heartbeat.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl\manager.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl\replset.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl\replset_commands.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl\rs_config.cpp"
- >
- </File>
- <File
- RelativePath="..\db\repl\rs_initiate.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="btree related"
- >
- <File
- RelativePath="..\db\btree.cpp"
- >
- </File>
- <File
- RelativePath="..\db\btree.h"
- >
- </File>
- <File
- RelativePath="..\db\btreecursor.cpp"
- >
- </File>
- </Filter>
- </Files>
- <Globals>
- </Globals>
-</VisualStudioProject>
diff --git a/dbtests/test.vcxproj b/dbtests/test.vcxproj
index d52278a..b80a730 100644
--- a/dbtests/test.vcxproj
+++ b/dbtests/test.vcxproj
@@ -68,7 +68,7 @@
<OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(SolutionDir)$(Configuration)\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(Configuration)\</IntDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(Configuration)\</IntDir>
- <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental>
+ <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</LinkIncremental>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</LinkIncremental>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(SolutionDir)$(Configuration)\</OutDir>
@@ -88,6 +88,10 @@
<CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Release|x64'" />
<CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" />
<CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|x64'" />
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">..;$(IncludePath)</IncludePath>
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">..;$(IncludePath)</IncludePath>
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Release|x64'">..;$(IncludePath)</IncludePath>
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">..;$(IncludePath)</IncludePath>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
@@ -100,7 +104,7 @@
<PrecompiledHeader>Use</PrecompiledHeader>
<PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
<WarningLevel>Level3</WarningLevel>
- <DebugInformationFormat>EditAndContinue</DebugInformationFormat>
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<DisableSpecificWarnings>4355;4800;%(DisableSpecificWarnings)</DisableSpecificWarnings>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
</ClCompile>
@@ -112,13 +116,14 @@
<GenerateDebugInformation>true</GenerateDebugInformation>
<SubSystem>Console</SubSystem>
<TargetMachine>MachineX86</TargetMachine>
+ <Profile>true</Profile>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<Optimization>Disabled</Optimization>
<AdditionalIncludeDirectories>..\..\js\src;..\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
- <PreprocessorDefinitions>_UNICODE;UNICODE;SUPPORT_UCP;SUPPORT_UTF8;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;WIN32;_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;PCRE_STATIC;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <PreprocessorDefinitions>_DURABLE;_UNICODE;UNICODE;SUPPORT_UCP;SUPPORT_UTF8;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;WIN32;_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;PCRE_STATIC;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
<PrecompiledHeader>Use</PrecompiledHeader>
@@ -191,16 +196,17 @@
</ItemDefinitionGroup>
<ItemGroup>
<ClInclude Include="..\..\boostw\boost_1_34_1\boost\config\auto_link.hpp" />
+ <ClInclude Include="..\db\dur.h" />
+ <ClInclude Include="..\db\durop.h" />
+ <ClInclude Include="..\db\dur_journal.h" />
+ <ClInclude Include="..\db\jsobjmanipulator.h" />
+ <ClInclude Include="..\db\mongommf.h" />
+ <ClInclude Include="..\db\mongomutex.h" />
<ClInclude Include="..\pcre-7.4\pcrecpp.h" />
<ClInclude Include="..\targetver.h" />
<ClInclude Include="..\..\boostw\boost_1_34_1\boost\version.hpp" />
<ClInclude Include="..\pcre-7.4\config.h" />
<ClInclude Include="..\pcre-7.4\pcre.h" />
- <ClInclude Include="..\db\rec.h" />
- <ClInclude Include="..\db\reccache.h" />
- <ClInclude Include="..\db\reci.h" />
- <ClInclude Include="..\db\recstore.h" />
- <ClInclude Include="..\db\storage.h" />
<ClInclude Include="..\client\connpool.h" />
<ClInclude Include="..\client\dbclient.h" />
<ClInclude Include="..\client\model.h" />
@@ -244,6 +250,7 @@
<ClInclude Include="..\util\hashtab.h" />
<ClInclude Include="..\db\lasterror.h" />
<ClInclude Include="..\util\log.h" />
+ <ClInclude Include="..\util\logfile.h" />
<ClInclude Include="..\util\lruishmap.h" />
<ClInclude Include="..\util\md5.h" />
<ClInclude Include="..\util\md5.hpp" />
@@ -253,14 +260,28 @@
<ClInclude Include="..\util\unittest.h" />
</ItemGroup>
<ItemGroup>
+ <ClCompile Include="..\bson\oid.cpp" />
<ClCompile Include="..\client\dbclientcursor.cpp" />
+ <ClCompile Include="..\client\dbclient_rs.cpp" />
<ClCompile Include="..\client\distlock.cpp" />
<ClCompile Include="..\client\gridfs.cpp" />
<ClCompile Include="..\client\model.cpp" />
<ClCompile Include="..\client\parallel.cpp" />
<ClCompile Include="..\db\cap.cpp" />
+ <ClCompile Include="..\db\commands\isself.cpp" />
+ <ClCompile Include="..\db\compact.cpp" />
+ <ClCompile Include="..\db\dbcommands_generic.cpp" />
+ <ClCompile Include="..\db\dur.cpp" />
+ <ClCompile Include="..\db\durop.cpp" />
+ <ClCompile Include="..\db\dur_commitjob.cpp" />
+ <ClCompile Include="..\db\dur_journal.cpp" />
+ <ClCompile Include="..\db\dur_preplogbuffer.cpp" />
+ <ClCompile Include="..\db\dur_recover.cpp" />
+ <ClCompile Include="..\db\dur_writetodatafiles.cpp" />
<ClCompile Include="..\db\geo\2d.cpp" />
<ClCompile Include="..\db\geo\haystack.cpp" />
+ <ClCompile Include="..\db\mongommf.cpp" />
+ <ClCompile Include="..\db\projection.cpp" />
<ClCompile Include="..\db\repl\consensus.cpp" />
<ClCompile Include="..\db\repl\heartbeat.cpp" />
<ClCompile Include="..\db\repl\manager.cpp" />
@@ -270,6 +291,7 @@
<ClCompile Include="..\db\repl\rs_rollback.cpp" />
<ClCompile Include="..\db\repl\rs_sync.cpp" />
<ClCompile Include="..\db\restapi.cpp" />
+ <ClCompile Include="..\db\security_key.cpp" />
<ClCompile Include="..\pcre-7.4\pcrecpp.cc">
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
</PrecompiledHeader>
@@ -510,7 +532,6 @@
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
</PrecompiledHeader>
</ClCompile>
- <ClCompile Include="..\db\storage.cpp" />
<ClCompile Include="..\client\connpool.cpp" />
<ClCompile Include="..\client\dbclient.cpp" />
<ClCompile Include="..\client\syncclusterconnection.cpp" />
@@ -542,21 +563,29 @@
<ClCompile Include="..\db\json.cpp" />
<ClCompile Include="..\db\lasterror.cpp" />
<ClCompile Include="..\db\matcher.cpp" />
+ <ClCompile Include="..\scripting\bench.cpp" />
<ClCompile Include="..\s\chunk.cpp" />
<ClCompile Include="..\s\config.cpp" />
+ <ClCompile Include="..\s\d_chunk_manager.cpp" />
<ClCompile Include="..\s\d_migrate.cpp" />
<ClCompile Include="..\s\d_split.cpp" />
<ClCompile Include="..\s\d_state.cpp" />
- <ClCompile Include="..\s\d_util.cpp" />
<ClCompile Include="..\s\d_writeback.cpp" />
<ClCompile Include="..\s\grid.cpp" />
<ClCompile Include="..\s\shard.cpp" />
<ClCompile Include="..\s\shardconnection.cpp" />
<ClCompile Include="..\s\shardkey.cpp" />
+ <ClCompile Include="..\util\alignedbuilder.cpp">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\util\concurrency\spin_lock.cpp" />
+ <ClCompile Include="..\util\concurrency\synchronization.cpp" />
<ClCompile Include="..\util\concurrency\task.cpp" />
<ClCompile Include="..\util\concurrency\thread_pool.cpp" />
<ClCompile Include="..\util\concurrency\vars.cpp" />
+ <ClCompile Include="..\util\file_allocator.cpp" />
<ClCompile Include="..\util\log.cpp" />
+ <ClCompile Include="..\util\logfile.cpp" />
<ClCompile Include="..\util\mmap_win.cpp" />
<ClCompile Include="..\db\namespace.cpp" />
<ClCompile Include="..\db\nonce.cpp" />
@@ -564,7 +593,6 @@
<ClCompile Include="..\db\query.cpp" />
<ClCompile Include="..\db\queryoptimizer.cpp" />
<ClCompile Include="..\util\processinfo.cpp" />
- <ClCompile Include="..\util\ramstore.cpp" />
<ClCompile Include="..\db\repl.cpp" />
<ClCompile Include="..\db\security.cpp" />
<ClCompile Include="..\db\security_commands.cpp" />
@@ -598,7 +626,6 @@
<ClCompile Include="..\util\message_server_port.cpp" />
<ClCompile Include="..\util\miniwebserver.cpp" />
<ClCompile Include="..\util\mmap.cpp" />
- <ClCompile Include="..\util\ntservice.cpp" />
<ClCompile Include="..\util\processinfo_win32.cpp" />
<ClCompile Include="..\util\sock.cpp" />
<ClCompile Include="..\util\stringutils.cpp" />
@@ -610,8 +637,7 @@
<ClCompile Include="..\shell\mongo_vstudio.cpp">
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
</PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
</PrecompiledHeader>
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
@@ -624,14 +650,17 @@
<ClCompile Include="clienttests.cpp" />
<ClCompile Include="cursortests.cpp" />
<ClCompile Include="dbtests.cpp" />
+ <ClCompile Include="directclienttests.cpp" />
<ClCompile Include="framework.cpp" />
<ClCompile Include="jsobjtests.cpp" />
<ClCompile Include="jsontests.cpp" />
<ClCompile Include="jstests.cpp" />
<ClCompile Include="matchertests.cpp" />
+ <ClCompile Include="mmaptests.cpp" />
<ClCompile Include="namespacetests.cpp" />
<ClCompile Include="pairingtests.cpp" />
<ClCompile Include="pdfiletests.cpp" />
+ <ClCompile Include="perftests.cpp" />
<ClCompile Include="queryoptimizertests.cpp" />
<ClCompile Include="querytests.cpp" />
<ClCompile Include="repltests.cpp" />
diff --git a/dbtests/test.vcxproj.filters b/dbtests/test.vcxproj.filters
index ba4c4af..c52f7f6 100755
--- a/dbtests/test.vcxproj.filters
+++ b/dbtests/test.vcxproj.filters
@@ -7,9 +7,6 @@
<Filter Include="misc and third party\pcre">
<UniqueIdentifier>{0a50fb63-4ac3-4e30-a9d4-b0841878ee73}</UniqueIdentifier>
</Filter>
- <Filter Include="storage related">
- <UniqueIdentifier>{eb2684bf-ca8d-4162-9313-56a81233c471}</UniqueIdentifier>
- </Filter>
<Filter Include="client">
<UniqueIdentifier>{45dab36c-864e-45de-bb8e-cf1d87a2c4f6}</UniqueIdentifier>
</Filter>
@@ -44,15 +41,18 @@
<Filter Include="replsets">
<UniqueIdentifier>{9320a670-3b28-471a-bf92-6c8d881a37a4}</UniqueIdentifier>
</Filter>
- <Filter Include="btree related">
- <UniqueIdentifier>{4fff2dbf-30c4-4295-8db8-d513c1e36220}</UniqueIdentifier>
- </Filter>
<Filter Include="util\concurrency">
<UniqueIdentifier>{d499fdba-b256-4b12-af20-cdd1ae1addff}</UniqueIdentifier>
</Filter>
<Filter Include="util\h">
<UniqueIdentifier>{353b6f01-1cab-4156-a576-bc75ab204776}</UniqueIdentifier>
</Filter>
+ <Filter Include="btree">
+ <UniqueIdentifier>{4fff2dbf-30c4-4295-8db8-d513c1e36220}</UniqueIdentifier>
+ </Filter>
+ <Filter Include="dur">
+ <UniqueIdentifier>{c296d097-0d46-46ee-9097-f2df659d9596}</UniqueIdentifier>
+ </Filter>
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\boostw\boost_1_34_1\boost\config\auto_link.hpp">
@@ -73,21 +73,6 @@
<ClInclude Include="..\pcre-7.4\pcre.h">
<Filter>misc and third party\pcre</Filter>
</ClInclude>
- <ClInclude Include="..\db\rec.h">
- <Filter>storage related</Filter>
- </ClInclude>
- <ClInclude Include="..\db\reccache.h">
- <Filter>storage related</Filter>
- </ClInclude>
- <ClInclude Include="..\db\reci.h">
- <Filter>storage related</Filter>
- </ClInclude>
- <ClInclude Include="..\db\recstore.h">
- <Filter>storage related</Filter>
- </ClInclude>
- <ClInclude Include="..\db\storage.h">
- <Filter>storage related</Filter>
- </ClInclude>
<ClInclude Include="..\client\connpool.h">
<Filter>client</Filter>
</ClInclude>
@@ -188,7 +173,7 @@
<Filter>db\h</Filter>
</ClInclude>
<ClInclude Include="..\db\btree.h">
- <Filter>btree related</Filter>
+ <Filter>btree</Filter>
</ClInclude>
<ClInclude Include="..\util\concurrency\list.h">
<Filter>util\concurrency</Filter>
@@ -238,6 +223,27 @@
<ClInclude Include="..\util\sock.h">
<Filter>util\h</Filter>
</ClInclude>
+ <ClInclude Include="..\db\dur.h">
+ <Filter>dur</Filter>
+ </ClInclude>
+ <ClInclude Include="..\db\dur_journal.h">
+ <Filter>dur</Filter>
+ </ClInclude>
+ <ClInclude Include="..\util\logfile.h">
+ <Filter>dur</Filter>
+ </ClInclude>
+ <ClInclude Include="..\db\mongommf.h">
+ <Filter>dur</Filter>
+ </ClInclude>
+ <ClInclude Include="..\db\durop.h">
+ <Filter>dur</Filter>
+ </ClInclude>
+ <ClInclude Include="..\db\jsobjmanipulator.h">
+ <Filter>db</Filter>
+ </ClInclude>
+ <ClInclude Include="..\db\mongomutex.h">
+ <Filter>db</Filter>
+ </ClInclude>
</ItemGroup>
<ItemGroup>
<Library Include="..\..\js\js64r.lib">
@@ -326,9 +332,6 @@
<ClCompile Include="..\pcre-7.4\pcreposix.c">
<Filter>misc and third party\pcre</Filter>
</ClCompile>
- <ClCompile Include="..\db\storage.cpp">
- <Filter>storage related</Filter>
- </ClCompile>
<ClCompile Include="..\client\connpool.cpp">
<Filter>client</Filter>
</ClCompile>
@@ -422,9 +425,6 @@
<ClCompile Include="..\db\queryoptimizer.cpp">
<Filter>db\cpp</Filter>
</ClCompile>
- <ClCompile Include="..\util\ramstore.cpp">
- <Filter>db\cpp</Filter>
- </ClCompile>
<ClCompile Include="..\db\repl.cpp">
<Filter>db\cpp</Filter>
</ClCompile>
@@ -485,9 +485,6 @@
<ClCompile Include="..\util\mmap.cpp">
<Filter>util\cpp</Filter>
</ClCompile>
- <ClCompile Include="..\util\ntservice.cpp">
- <Filter>util\cpp</Filter>
- </ClCompile>
<ClCompile Include="..\util\processinfo_win32.cpp">
<Filter>util\cpp</Filter>
</ClCompile>
@@ -591,10 +588,10 @@
<Filter>replsets</Filter>
</ClCompile>
<ClCompile Include="..\db\btree.cpp">
- <Filter>btree related</Filter>
+ <Filter>btree</Filter>
</ClCompile>
<ClCompile Include="..\db\btreecursor.cpp">
- <Filter>btree related</Filter>
+ <Filter>btree</Filter>
</ClCompile>
<ClCompile Include="..\db\repl\manager.cpp">
<Filter>db\cpp</Filter>
@@ -614,9 +611,6 @@
<ClCompile Include="..\s\shardconnection.cpp">
<Filter>shard</Filter>
</ClCompile>
- <ClCompile Include="..\s\d_util.cpp">
- <Filter>shard</Filter>
- </ClCompile>
<ClCompile Include="..\util\concurrency\thread_pool.cpp">
<Filter>util\concurrency</Filter>
</ClCompile>
@@ -698,6 +692,81 @@
<ClCompile Include="..\db\restapi.cpp">
<Filter>db\cpp</Filter>
</ClCompile>
+ <ClCompile Include="..\util\concurrency\spin_lock.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="mmaptests.cpp">
+ <Filter>dbtests</Filter>
+ </ClCompile>
+ <ClCompile Include="..\scripting\bench.cpp">
+ <Filter>scripting</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\compact.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\commands\isself.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\dur.cpp">
+ <Filter>dur</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\dur_journal.cpp">
+ <Filter>dur</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\logfile.cpp">
+ <Filter>dur</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\mongommf.cpp">
+ <Filter>dur</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\projection.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\s\d_chunk_manager.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\dur_recover.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\durop.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\dbcommands_generic.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\alignedbuilder.cpp">
+ <Filter>util</Filter>
+ </ClCompile>
+ <ClCompile Include="..\bson\oid.cpp">
+ <Filter>db</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\concurrency\synchronization.cpp">
+ <Filter>util</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\dur_commitjob.cpp">
+ <Filter>dur</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\dur_writetodatafiles.cpp">
+ <Filter>dur</Filter>
+ </ClCompile>
+ <ClCompile Include="..\client\dbclient_rs.cpp">
+ <Filter>client</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\dur_preplogbuffer.cpp">
+ <Filter>dur</Filter>
+ </ClCompile>
+ <ClCompile Include="perftests.cpp">
+ <Filter>dbtests</Filter>
+ </ClCompile>
+ <ClCompile Include="directclienttests.cpp">
+ <Filter>dbtests</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\security_key.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\file_allocator.cpp">
+ <Filter>util\cpp</Filter>
+ </ClCompile>
</ItemGroup>
<ItemGroup>
<None Include="..\SConstruct">
diff --git a/dbtests/threadedtests.cpp b/dbtests/threadedtests.cpp
index af413cc..805b2d5 100644
--- a/dbtests/threadedtests.cpp
+++ b/dbtests/threadedtests.cpp
@@ -21,6 +21,7 @@
#include "../bson/util/atomic_int.h"
#include "../util/concurrency/mvar.h"
#include "../util/concurrency/thread_pool.h"
+#include "../util/timer.h"
#include <boost/thread.hpp>
#include <boost/bind.hpp>
@@ -29,34 +30,108 @@
namespace ThreadedTests {
template <int nthreads_param=10>
- class ThreadedTest{
- public:
- virtual void setup() {} //optional
- virtual void subthread() = 0;
- virtual void validate() = 0;
+ class ThreadedTest {
+ public:
+ virtual void setup() {} //optional
+ virtual void subthread() = 0;
+ virtual void validate() = 0;
- static const int nthreads = nthreads_param;
+ static const int nthreads = nthreads_param;
- void run(){
- setup();
+ void run() {
+ setup();
+ launch_subthreads(nthreads);
+ validate();
+ }
- launch_subthreads(nthreads);
+ virtual ~ThreadedTest() {}; // not necessary, but makes compilers happy
- validate();
- }
+ private:
+ void launch_subthreads(int remaining) {
+ if (!remaining) return;
- virtual ~ThreadedTest() {}; // not necessary, but makes compilers happy
+ boost::thread athread(boost::bind(&ThreadedTest::subthread, this));
- private:
- void launch_subthreads(int remaining){
- if (!remaining) return;
+ launch_subthreads(remaining - 1);
- boost::thread athread(boost::bind(&ThreadedTest::subthread, this));
-
- launch_subthreads(remaining - 1);
+ athread.join();
+ }
+ };
- athread.join();
+ class MongoMutexTest : public ThreadedTest<135> {
+#if defined(_DEBUG)
+ enum { N = 5000 };
+#else
+ enum { N = 40000 };
+#endif
+ MongoMutex *mm;
+ public:
+ void run() {
+ Timer t;
+ cout << "MongoMutexTest N:" << N << endl;
+ ThreadedTest<135>::run();
+ cout << "MongoMutexTest " << t.millis() << "ms" << endl;
+ }
+ private:
+ virtual void setup() {
+ mm = new MongoMutex("MongoMutexTest");
+ }
+ virtual void subthread() {
+ Client::initThread("mongomutextest");
+ sleepmillis(0);
+ for( int i = 0; i < N; i++ ) {
+ if( i % 7 == 0 ) {
+ mm->lock_shared();
+ mm->lock_shared();
+ mm->unlock_shared();
+ mm->unlock_shared();
+ }
+ else if( i % 7 == 1 ) {
+ mm->lock_shared();
+ ASSERT( mm->atLeastReadLocked() );
+ mm->unlock_shared();
+ }
+ else if( i % 7 == 2 ) {
+ mm->lock();
+ ASSERT( mm->isWriteLocked() );
+ mm->unlock();
+ }
+ else if( i % 7 == 3 ) {
+ mm->lock();
+ mm->lock_shared();
+ ASSERT( mm->isWriteLocked() );
+ mm->unlock_shared();
+ mm->unlock();
+ }
+ else if( i % 7 == 4 ) {
+ mm->lock();
+ mm->releaseEarly();
+ mm->unlock();
+ }
+ else if( i % 7 == 5 ) {
+ if( mm->lock_try(1) ) {
+ mm->unlock();
+ }
+ }
+ else if( i % 7 == 6 ) {
+ if( mm->lock_shared_try(0) ) {
+ mm->unlock_shared();
+ }
+ }
+ else {
+ mm->lock_shared();
+ mm->unlock_shared();
+ }
}
+ cc().shutdown();
+ }
+ virtual void validate() {
+ ASSERT( !mm->atLeastReadLocked() );
+ mm->lock();
+ mm->unlock();
+ mm->lock_shared();
+ mm->unlock_shared();
+ }
};
// Tested with up to 30k threads
@@ -64,13 +139,13 @@ namespace ThreadedTests {
static const int iterations = 1000000;
AtomicUInt target;
- void subthread(){
- for(int i=0; i < iterations; i++){
+ void subthread() {
+ for(int i=0; i < iterations; i++) {
//target.x++; // verified to fail with this version
target++;
}
}
- void validate(){
+ void validate() {
ASSERT_EQUALS(target.x , unsigned(nthreads * iterations));
AtomicUInt u;
@@ -80,6 +155,12 @@ namespace ThreadedTests {
ASSERT_EQUALS(2u, u--);
ASSERT_EQUALS(0u, --u);
ASSERT_EQUALS(0u, u);
+
+ u++;
+ ASSERT( u > 0 );
+
+ u--;
+ ASSERT( ! ( u > 0 ) );
}
};
@@ -87,10 +168,10 @@ namespace ThreadedTests {
static const int iterations = 10000;
MVar<int> target;
- public:
+ public:
MVarTest() : target(0) {}
- void subthread(){
- for(int i=0; i < iterations; i++){
+ void subthread() {
+ for(int i=0; i < iterations; i++) {
int val = target.take();
#if BOOST_VERSION >= 103500
//increase chances of catching failure
@@ -99,30 +180,30 @@ namespace ThreadedTests {
target.put(val+1);
}
}
- void validate(){
+ void validate() {
ASSERT_EQUALS(target.take() , nthreads * iterations);
}
};
- class ThreadPoolTest{
+ class ThreadPoolTest {
static const int iterations = 10000;
static const int nThreads = 8;
AtomicUInt counter;
- void increment(int n){
- for (int i=0; i<n; i++){
+ void increment(int n) {
+ for (int i=0; i<n; i++) {
counter++;
}
}
- public:
- void run(){
+ public:
+ void run() {
ThreadPool tp(nThreads);
- for (int i=0; i < iterations; i++){
+ for (int i=0; i < iterations; i++) {
tp.schedule(&ThreadPoolTest::increment, this, 2);
}
-
+
tp.join();
ASSERT(counter == (unsigned)(iterations * 2));
@@ -131,7 +212,7 @@ namespace ThreadedTests {
class LockTest {
public:
- void run(){
+ void run() {
// quick atomicint wrap test
// MSGID likely assumes this semantic
AtomicUInt counter = 0xffffffff;
@@ -145,14 +226,15 @@ namespace ThreadedTests {
class All : public Suite {
public:
- All() : Suite( "threading" ){
+ All() : Suite( "threading" ) {
}
- void setupTests(){
+ void setupTests() {
add< IsAtomicUIntAtomic >();
add< MVarTest >();
add< ThreadPoolTest >();
add< LockTest >();
+ add< MongoMutexTest >();
}
} myall;
}
diff --git a/dbtests/updatetests.cpp b/dbtests/updatetests.cpp
index 17f861e..0f95a32 100644
--- a/dbtests/updatetests.cpp
+++ b/dbtests/updatetests.cpp
@@ -110,14 +110,14 @@ namespace UpdateTests {
class PushAllNonArray : public Fail {
void doIt() {
- insert( ns(), fromjson( "{a:[1]}" ) );
+ insert( ns(), fromjson( "{a:[1]}" ) );
update( ns(), BSONObj(), fromjson( "{$pushAll:{a:'d'}}" ) );
}
};
class PullAllNonArray : public Fail {
void doIt() {
- insert( ns(), fromjson( "{a:[1]}" ) );
+ insert( ns(), fromjson( "{a:[1]}" ) );
update( ns(), BSONObj(), fromjson( "{$pullAll:{a:'d'}}" ) );
}
};
@@ -241,12 +241,12 @@ namespace UpdateTests {
class MultiInc : public SetBase {
public:
-
- string s(){
+
+ string s() {
stringstream ss;
auto_ptr<DBClientCursor> cc = client().query( ns() , Query().sort( BSON( "_id" << 1 ) ) );
bool first = true;
- while ( cc->more() ){
+ while ( cc->more() ) {
if ( first ) first = false;
else ss << ",";
@@ -255,11 +255,11 @@ namespace UpdateTests {
}
return ss.str();
}
-
- void run(){
+
+ void run() {
client().insert( ns(), BSON( "_id" << 1 << "x" << 1 ) );
client().insert( ns(), BSON( "_id" << 2 << "x" << 5 ) );
-
+
ASSERT_EQUALS( "1,5" , s() );
client().update( ns() , BSON( "_id" << 1 ) , BSON( "$inc" << BSON( "x" << 1 ) ) );
@@ -270,7 +270,7 @@ namespace UpdateTests {
client().update( ns() , BSONObj() , BSON( "$inc" << BSON( "x" << 1 ) ) , false , true );
ASSERT_EQUALS( "4,6" , s() );
-
+
}
};
@@ -498,10 +498,10 @@ namespace UpdateTests {
client().insert( ns(), BSON( "_id" << 55 << "i" << 5 ) );
client().update( ns(), BSON( "i" << 5 ), BSON( "i" << 6 ) );
ASSERT( !client().findOne( ns(), Query( BSON( "_id" << 55 ) ).hint
- ( "{\"_id\":ObjectId(\"000000000000000000000000\")}" ) ).isEmpty() );
+ ( "{\"_id\":ObjectId(\"000000000000000000000000\")}" ) ).isEmpty() );
}
};
-
+
class CheckNoMods : public SetBase {
public:
void run() {
@@ -509,7 +509,7 @@ namespace UpdateTests {
ASSERT( error() );
}
};
-
+
class UpdateMissingToNull : public SetBase {
public:
void run() {
@@ -520,10 +520,10 @@ namespace UpdateTests {
};
namespace ModSetTests {
-
+
class internal1 {
public:
- void run(){
+ void run() {
BSONObj b = BSON( "$inc" << BSON( "x" << 1 << "a.b" << 1 ) );
ModSet m(b);
@@ -532,7 +532,7 @@ namespace UpdateTests {
ASSERT( ! m.haveModForField( "y" ) );
ASSERT( ! m.haveModForField( "a.c" ) );
ASSERT( ! m.haveModForField( "a" ) );
-
+
ASSERT( m.haveConflictingMod( "x" ) );
ASSERT( m.haveConflictingMod( "a" ) );
ASSERT( m.haveConflictingMod( "a.b" ) );
@@ -541,14 +541,14 @@ namespace UpdateTests {
ASSERT( ! m.haveConflictingMod( "a.a" ) );
}
};
-
+
class Base {
public:
- virtual ~Base(){}
+ virtual ~Base() {}
+
-
- void test( BSONObj morig , BSONObj in , BSONObj wanted ){
+ void test( BSONObj morig , BSONObj in , BSONObj wanted ) {
BSONObj m = morig.copy();
ModSet set(m);
@@ -556,20 +556,20 @@ namespace UpdateTests {
ASSERT_EQUALS( wanted , out );
}
};
-
+
class inc1 : public Base {
public:
- void run(){
+ void run() {
BSONObj m = BSON( "$inc" << BSON( "x" << 1 ) );
test( m , BSON( "x" << 5 ) , BSON( "x" << 6 ) );
test( m , BSON( "a" << 5 ) , BSON( "a" << 5 << "x" << 1 ) );
test( m , BSON( "z" << 5 ) , BSON( "x" << 1 << "z" << 5 ) );
}
};
-
+
class inc2 : public Base {
public:
- void run(){
+ void run() {
BSONObj m = BSON( "$inc" << BSON( "a.b" << 1 ) );
test( m , BSONObj() , BSON( "a" << BSON( "b" << 1 ) ) );
test( m , BSON( "a" << BSON( "b" << 2 ) ) , BSON( "a" << BSON( "b" << 3 ) ) );
@@ -577,23 +577,23 @@ namespace UpdateTests {
m = BSON( "$inc" << BSON( "a.b" << 1 << "a.c" << 1 ) );
test( m , BSONObj() , BSON( "a" << BSON( "b" << 1 << "c" << 1 ) ) );
-
+
}
};
class set1 : public Base {
public:
- void run(){
+ void run() {
test( BSON( "$set" << BSON( "x" << 17 ) ) , BSONObj() , BSON( "x" << 17 ) );
test( BSON( "$set" << BSON( "x" << 17 ) ) , BSON( "x" << 5 ) , BSON( "x" << 17 ) );
test( BSON( "$set" << BSON( "x.a" << 17 ) ) , BSON( "z" << 5 ) , BSON( "x" << BSON( "a" << 17 )<< "z" << 5 ) );
}
- };
-
+ };
+
class push1 : public Base {
public:
- void run(){
+ void run() {
test( BSON( "$push" << BSON( "a" << 5 ) ) , fromjson( "{a:[1]}" ) , fromjson( "{a:[1,5]}" ) );
}
};
@@ -602,33 +602,45 @@ namespace UpdateTests {
namespace basic {
class Base : public ClientBase {
+ protected:
+
virtual const char * ns() = 0;
virtual void dotest() = 0;
-
- protected:
- void test( const char* initial , const char* mod , const char* after ){
+ void insert( const BSONObj& o ) {
+ client().insert( ns() , o );
+ }
+
+ void update( const BSONObj& m ) {
+ client().update( ns() , BSONObj() , m );
+ }
+
+ BSONObj findOne() {
+ return client().findOne( ns() , BSONObj() );
+ }
+
+ void test( const char* initial , const char* mod , const char* after ) {
test( fromjson( initial ) , fromjson( mod ) , fromjson( after ) );
}
- void test( const BSONObj& initial , const BSONObj& mod , const BSONObj& after ){
+ void test( const BSONObj& initial , const BSONObj& mod , const BSONObj& after ) {
client().dropCollection( ns() );
- client().insert( ns() , initial );
- client().update( ns() , BSONObj() , mod );
- ASSERT_EQUALS( after , client().findOne( ns(), BSONObj() ));
+ insert( initial );
+ update( mod );
+ ASSERT_EQUALS( after , findOne() );
client().dropCollection( ns() );
}
public:
-
- Base(){}
- virtual ~Base(){
+
+ Base() {}
+ virtual ~Base() {
}
- void run(){
+ void run() {
client().dropCollection( ns() );
-
+
dotest();
client().dropCollection( ns() );
@@ -640,98 +652,124 @@ namespace UpdateTests {
virtual BSONObj mod() = 0;
virtual BSONObj after() = 0;
- void dotest(){
+ void dotest() {
test( initial() , mod() , after() );
}
-
+
};
-
+
class inc1 : public SingleTest {
- virtual BSONObj initial(){
+ virtual BSONObj initial() {
return BSON( "_id" << 1 << "x" << 1 );
}
- virtual BSONObj mod(){
+ virtual BSONObj mod() {
return BSON( "$inc" << BSON( "x" << 2 ) );
}
- virtual BSONObj after(){
+ virtual BSONObj after() {
return BSON( "_id" << 1 << "x" << 3 );
}
- virtual const char * ns(){
+ virtual const char * ns() {
return "unittests.inc1";
}
};
class inc2 : public SingleTest {
- virtual BSONObj initial(){
+ virtual BSONObj initial() {
return BSON( "_id" << 1 << "x" << 1 );
}
- virtual BSONObj mod(){
+ virtual BSONObj mod() {
return BSON( "$inc" << BSON( "x" << 2.5 ) );
}
- virtual BSONObj after(){
+ virtual BSONObj after() {
return BSON( "_id" << 1 << "x" << 3.5 );
}
- virtual const char * ns(){
+ virtual const char * ns() {
return "unittests.inc2";
}
};
-
+
class inc3 : public SingleTest {
- virtual BSONObj initial(){
+ virtual BSONObj initial() {
return BSON( "_id" << 1 << "x" << 537142123123LL );
}
- virtual BSONObj mod(){
+ virtual BSONObj mod() {
return BSON( "$inc" << BSON( "x" << 2 ) );
}
- virtual BSONObj after(){
+ virtual BSONObj after() {
return BSON( "_id" << 1 << "x" << 537142123125LL );
}
- virtual const char * ns(){
- return "unittests.inc2";
+ virtual const char * ns() {
+ return "unittests.inc3";
}
};
class inc4 : public SingleTest {
- virtual BSONObj initial(){
+ virtual BSONObj initial() {
return BSON( "_id" << 1 << "x" << 537142123123LL );
}
- virtual BSONObj mod(){
+ virtual BSONObj mod() {
return BSON( "$inc" << BSON( "x" << 2LL ) );
}
- virtual BSONObj after(){
+ virtual BSONObj after() {
return BSON( "_id" << 1 << "x" << 537142123125LL );
}
- virtual const char * ns(){
- return "unittests.inc2";
+ virtual const char * ns() {
+ return "unittests.inc4";
}
};
class inc5 : public SingleTest {
- virtual BSONObj initial(){
+ virtual BSONObj initial() {
return BSON( "_id" << 1 << "x" << 537142123123LL );
}
- virtual BSONObj mod(){
+ virtual BSONObj mod() {
return BSON( "$inc" << BSON( "x" << 2.0 ) );
}
- virtual BSONObj after(){
+ virtual BSONObj after() {
return BSON( "_id" << 1 << "x" << 537142123125LL );
}
- virtual const char * ns(){
- return "unittests.inc2";
+ virtual const char * ns() {
+ return "unittests.inc5";
}
};
+ class inc6 : public Base {
+
+ virtual const char * ns() {
+ return "unittests.inc6";
+ }
+
+
+ virtual BSONObj initial() { return BSONObj(); }
+ virtual BSONObj mod() { return BSONObj(); }
+ virtual BSONObj after() { return BSONObj(); }
+
+ void dotest() {
+ client().insert( ns() , BSON( "x" << 5 ) );
+ ASSERT( findOne()["x"].type() == NumberInt );
+ long long start = 5;
+ long long max = numeric_limits<int>::max();
+ max *= 32;
+
+ while ( start < max ) {
+ update( BSON( "$inc" << BSON( "x" << 500000 ) ) );
+ start += 500000;
+ ASSERT_EQUALS( start , findOne()["x"].numberLong() ); // SERVER-2005
+ }
+
+ }
+ };
class bit1 : public Base {
- const char * ns(){
+ const char * ns() {
return "unittests.bit1";
}
- void dotest(){
+ void dotest() {
test( BSON( "_id" << 1 << "x" << 3 ) , BSON( "$bit" << BSON( "x" << BSON( "and" << 2 ) ) ) , BSON( "_id" << 1 << "x" << ( 3 & 2 ) ) );
test( BSON( "_id" << 1 << "x" << 1 ) , BSON( "$bit" << BSON( "x" << BSON( "or" << 4 ) ) ) , BSON( "_id" << 1 << "x" << ( 1 | 4 ) ) );
test( BSON( "_id" << 1 << "x" << 3 ) , BSON( "$bit" << BSON( "x" << BSON( "and" << 2 << "or" << 8 ) ) ) , BSON( "_id" << 1 << "x" << ( ( 3 & 2 ) | 8 ) ) );
@@ -739,21 +777,21 @@ namespace UpdateTests {
}
};
-
+
class unset : public Base {
- const char * ns(){
+ const char * ns() {
return "unittests.unset";
}
- void dotest(){
+ void dotest() {
test( "{_id:1,x:1}" , "{$unset:{x:1}}" , "{_id:1}" );
}
};
class setswitchint : public Base {
- const char * ns(){
+ const char * ns() {
return "unittests.int1";
}
- void dotest(){
+ void dotest() {
test( BSON( "_id" << 1 << "x" << 1 ) , BSON( "$set" << BSON( "x" << 5.6 ) ) , BSON( "_id" << 1 << "x" << 5.6 ) );
test( BSON( "_id" << 1 << "x" << 5.6 ) , BSON( "$set" << BSON( "x" << 1 ) ) , BSON( "_id" << 1 << "x" << 1 ) );
}
@@ -761,12 +799,12 @@ namespace UpdateTests {
};
-
+
class All : public Suite {
public:
All() : Suite( "update" ) {
}
- void setupTests(){
+ void setupTests() {
add< ModId >();
add< ModNonmodMix >();
add< InvalidMod >();
@@ -815,18 +853,19 @@ namespace UpdateTests {
add< PreserveIdWithIndex >();
add< CheckNoMods >();
add< UpdateMissingToNull >();
-
+
add< ModSetTests::internal1 >();
add< ModSetTests::inc1 >();
add< ModSetTests::inc2 >();
add< ModSetTests::set1 >();
add< ModSetTests::push1 >();
-
+
add< basic::inc1 >();
add< basic::inc2 >();
add< basic::inc3 >();
add< basic::inc4 >();
add< basic::inc5 >();
+ add< basic::inc6 >();
add< basic::bit1 >();
add< basic::unset >();
add< basic::setswitchint >();