Skip to content
Browse files

Many tests are failing still.

Merge branch 'typedcollections' of git://github.com/lanwin/mongodb-csharp into typedcollections

Conflicts:
	.gitignore
	MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj
	MongoDB.GridFS/GridFileInfo.cs
	MongoDB.GridFS/GridFileStream.cs
	MongoDB.GridFS/MongoDB.GridFS.csproj
	MongoDB.Net-Tests/Bson/TestBsonBinary.cs
	MongoDB.Net-Tests/Bson/TestBsonReader.cs
	MongoDB.Net-Tests/MongoTestBase.cs
	MongoDBDriver/Bson/BsonReader.cs
	MongoDBDriver/Bson/BsonWriter.cs
	MongoDBDriver/Collection.cs
	MongoDBDriver/Connections/Connection.cs
	MongoDBDriver/MongoDB.Driver.csproj
	MongoDBDriver/Protocol/InsertMessage.cs
	MongoDBDriver/Protocol/MessageBase.cs
	MongoDBDriver/Protocol/RequestMessageBase.cs
  • Loading branch information...
2 parents 2b8934f + 1eeb478 commit 616c80c9ba411e25c4c85e835a6e851ba9b76ec0 @samus committed Mar 18, 2010
Showing with 4,185 additions and 3,911 deletions.
  1. +0 −1 .gitignore
  2. +13 −13 MongoDB.Driver.Benchmark/Main.cs
  3. +2 −2 MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj
  4. +0 −36 MongoDB.GridFS.Tests/Properties/AssemblyInfo.cs
  5. +0 −68 MongoDB.GridFS/GridChunk.cs
  6. +14 −11 MongoDB.GridFS/GridFile.cs
  7. +4 −4 MongoDB.GridFS/GridFileInfo.cs
  8. +175 −173 MongoDB.GridFS/GridFileStream.cs
  9. +2 −2 MongoDB.GridFS/MongoDB.GridFS.csproj
  10. +0 −36 MongoDB.GridFS/Properties/AssemblyInfo.cs
  11. +4 −4 MongoDB.Linq.Tests/TestMongoDocumentQuerySyntax.cs
  12. +4 −4 MongoDB.Linq.Tests/TestQueryParsing.cs
  13. +0 −2,301 MongoDB.Linq.Tests/test-results/MongoDB.Linq.Tests.csproj-Debug-2009-10-12.xml
  14. +1 −1 MongoDB.Linq/MongoLinqEx.cs
  15. +3 −2 MongoDB.Linq/MongoQueryProvider.cs
  16. +1 −1 MongoDB.Net-Tests/Bson/TestBsonBinary.cs
  17. +8 −9 MongoDB.Net-Tests/Bson/TestBsonReader.cs
  18. +7 −7 MongoDB.Net-Tests/Bson/TestBsonWriter.cs
  19. +1 −1 MongoDB.Net-Tests/Bson/TestRoundTrips.cs
  20. +6 −6 MongoDB.Net-Tests/Connections/TestConnection.cs
  21. +2 −2 MongoDB.Net-Tests/IO/TestQueryMessage.cs
  22. +5 −1 MongoDB.Net-Tests/MongoDB.Driver.Tests.csproj
  23. +1 −1 MongoDB.Net-Tests/MongoTestBase.cs
  24. +129 −0 MongoDB.Net-Tests/Serialization/ArrayFactoryTests.cs
  25. +38 −0 MongoDB.Net-Tests/Serialization/ReflectionBuilderTests.cs
  26. +163 −0 MongoDB.Net-Tests/Serialization/RelectionDescriptorTests.cs
  27. +49 −0 MongoDB.Net-Tests/Serialization/SerializationTestBase.cs
  28. +25 −24 MongoDB.Net-Tests/TestCollection.cs
  29. +3 −6 MongoDB.Net-Tests/TestCollectionMetaData.cs
  30. +11 −10 MongoDB.Net-Tests/TestCollectionSafeMode.cs
  31. +15 −14 MongoDB.Net-Tests/TestConcurrency.cs
  32. +10 −10 MongoDB.Net-Tests/TestCursor.cs
  33. +4 −9 MongoDB.Net-Tests/TestDatabase.cs
  34. +6 −6 MongoDB.Net-Tests/{TestDatabaseJS.cs → TestDatabaseJavascript.cs}
  35. +2 −2 MongoDB.Net-Tests/TestMapReduce.cs
  36. +2 −2 MongoDB.Net-Tests/TestMapReduceBuilder.cs
  37. +1 −1 MongoDB.Net-Tests/TestMongo.cs
  38. +2 −3 MongoDBDriver/Binary.cs
  39. +15 −0 MongoDBDriver/Bson/BsonObjectProperty.cs
  40. +150 −139 MongoDBDriver/Bson/BsonReader.cs
  41. +282 −220 MongoDBDriver/Bson/BsonWriter.cs
  42. +110 −0 MongoDBDriver/Bson/DocumentBuilder.cs
  43. +26 −0 MongoDBDriver/Bson/DocumentDescriptor.cs
  44. +12 −0 MongoDBDriver/Bson/IBsonObjectBuilder.cs
  45. +13 −0 MongoDBDriver/Bson/IBsonObjectDescriptor.cs
  46. +5 −5 MongoDBDriver/CollectionMetaData.cs
  47. +16 −7 MongoDBDriver/Connections/Connection.cs
  48. +311 −152 MongoDBDriver/Cursor.cs
  49. +0 −207 MongoDBDriver/Database.cs
  50. +4 −4 MongoDBDriver/{DatabaseJS.cs → DatabaseJavascript.cs}
  51. +20 −18 MongoDBDriver/DatabaseMetaData.cs
  52. +19 −15 MongoDBDriver/ICursor.cs
  53. +386 −29 MongoDBDriver/IMongoCollection.cs
  54. +139 −0 MongoDBDriver/IMongoDatabase.cs
  55. +2 −2 MongoDBDriver/MapReduce.cs
  56. +4 −4 MongoDBDriver/Mongo.cs
  57. +689 −0 MongoDBDriver/MongoCollection.cs
  58. +24 −4 MongoDBDriver/MongoDB.Driver.csproj
  59. +305 −0 MongoDBDriver/MongoDatabase.cs
  60. +27 −28 MongoDBDriver/Protocol/DeleteMessage.cs
  61. +37 −38 MongoDBDriver/Protocol/GetMoreMessage.cs
  62. +12 −12 MongoDBDriver/Protocol/InsertMessage.cs
  63. +37 −38 MongoDBDriver/Protocol/KillCursorsMessage.cs
  64. +3 −3 MongoDBDriver/Protocol/MessageBase.cs
  65. +20 −20 MongoDBDriver/Protocol/MessageHeader.cs
  66. +17 −18 MongoDBDriver/Protocol/MsgMessage.cs
  67. +1 −1 MongoDBDriver/Protocol/OpCode.cs
  68. +56 −55 MongoDBDriver/Protocol/QueryMessage.cs
  69. +104 −72 MongoDBDriver/Protocol/ReplyMessage.cs
  70. +10 −10 MongoDBDriver/Protocol/RequestMessageBase.cs
  71. +29 −30 MongoDBDriver/Protocol/UpdateMessage.cs
  72. +58 −0 MongoDBDriver/Serialization/ArrayFactory.cs
  73. +56 −0 MongoDBDriver/Serialization/Handlers/DocumentArrayBuilderHandler.cs
  74. +30 −0 MongoDBDriver/Serialization/Handlers/DocumentBuilderHandler.cs
  75. +26 −0 MongoDBDriver/Serialization/Handlers/IBsonBuilderHandler.cs
  76. +45 −0 MongoDBDriver/Serialization/Handlers/ObjectArrayBuilderHandler.cs
  77. +41 −0 MongoDBDriver/Serialization/Handlers/ObjectBuilderHandler.cs
  78. +25 −0 MongoDBDriver/Serialization/MongoDefaultAttribute.cs
  79. +12 −0 MongoDBDriver/Serialization/MongoIgnoreAttribute.cs
  80. +27 −0 MongoDBDriver/Serialization/MongoNameAttribute.cs
  81. +77 −0 MongoDBDriver/Serialization/ObjectDescriptor.cs
  82. +67 −0 MongoDBDriver/Serialization/ReflectionBuilder.cs
  83. +67 −0 MongoDBDriver/Serialization/ReflectionBuilder`1.cs
  84. +38 −0 MongoDBDriver/Serialization/ReflectionDescriptor.cs
  85. +10 −0 MongoDBDriver/UpdateFlags.cs
  86. +5 −4 examples/Simple/Main.cs
  87. +3 −3 examples/SimpleVB/Application.vb
View
1 .gitignore
@@ -31,4 +31,3 @@
/redist/*.zip
*_ReSharper.*/**
->>>>>>> dc06fc695339fb920a153bad7a131c47f57ac7d7:.gitignore
View
26 MongoDB.Driver.Benchmark/Main.cs
@@ -26,7 +26,7 @@ public static void Main (string[] args)
Mongo m = new Mongo();
m.Connect();
- Database db = m["benchmark"];
+ MongoDatabase db = m["benchmark"];
db.MetaData.DropDatabase();
Console.WriteLine("Starting Tests");
@@ -104,7 +104,7 @@ public static void Main (string[] args)
large.Append("harvested_words", harvestedWords);
}
#region Insert Tests
- static void RunInsertTest(string name, Database db, string col, Document doc, bool index, bool bulk){
+ static void RunInsertTest(string name, MongoDatabase db, string col, Document doc, bool index, bool bulk){
TimeSpan lowest = TimeSpan.MaxValue;
for(int i = 0; i < trials; i++){
SetupInsert(db,"col",index);
@@ -115,7 +115,7 @@ public static void Main (string[] args)
Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest));
}
- static void SetupInsert(Database db, string col, bool index){
+ static void SetupInsert(MongoDatabase db, string col, bool index){
try{
db.MetaData.DropCollection(col);
if(index){
@@ -127,7 +127,7 @@ public static void Main (string[] args)
}
}
- static TimeSpan TimeInsert(Database db, string col, Document doc, bool bulk){
+ static TimeSpan TimeInsert(MongoDatabase db, string col, Document doc, bool bulk){
DateTime start = DateTime.Now;
if(bulk){
DoBulkInsert(db,col,doc, batchSize);
@@ -139,7 +139,7 @@ public static void Main (string[] args)
return t;
}
- static void DoInsert(Database db, string col, Document doc){
+ static void DoInsert(MongoDatabase db, string col, Document doc){
for(int i = 0; i < perTrial; i++){
Document ins = new Document();
doc.CopyTo(ins);
@@ -148,7 +148,7 @@ public static void Main (string[] args)
}
}
- static void DoBulkInsert(Database db, string col, Document doc, int size){
+ static void DoBulkInsert(MongoDatabase db, string col, Document doc, int size){
for(int i = 0; i < perTrial / size; i++){
Document[] docs = new Document[size];
for(int f = 0; f < docs.Length; f++){
@@ -184,7 +184,7 @@ public static void Main (string[] args)
MemoryStream ms = new MemoryStream();
for(int i = 0; i < perTrial; i++){
BsonWriter writer = new BsonWriter(ms);
- writer.Write(doc);
+ writer.WriteObject(doc);
ms.Seek(0,SeekOrigin.Begin);
}
}
@@ -193,7 +193,7 @@ public static void Main (string[] args)
static void RunDecodeTest(string name, Document doc){
MemoryStream ms = new MemoryStream();
BsonWriter writer = new BsonWriter(ms);
- writer.Write(doc);
+ writer.WriteObject(doc);
byte[] buff = ms.ToArray();
@@ -224,7 +224,7 @@ public static void Main (string[] args)
}
#region Find Tests
- static void RunFindTest(string name, Database db, string col, Document spec, bool range){
+ static void RunFindTest(string name, MongoDatabase db, string col, Document spec, bool range){
TimeSpan lowest = TimeSpan.MaxValue;
for(int i = 0; i < trials; i++){
TimeSpan ret = TimeFind(db, col, spec, range);
@@ -234,7 +234,7 @@ public static void Main (string[] args)
Console.Out.WriteLine(String.Format("{0}{1} {2}", name + new string('.', 55 - name.Length), opsSec, lowest));
}
- static TimeSpan TimeFind(Database db, string col,Document psec, bool range){
+ static TimeSpan TimeFind(MongoDatabase db, string col,Document psec, bool range){
DateTime start = DateTime.Now;
if(range){
DoFindOne(db,col,psec);
@@ -246,15 +246,15 @@ public static void Main (string[] args)
return t;
}
- static void DoFindOne(Database db, string col, Document spec){
+ static void DoFindOne(MongoDatabase db, string col, Document spec){
for(int i = 0; i < perTrial; i++){
db[col].FindOne(spec);
}
}
- static void DoFind(Database db, string col, Document spec){
+ static void DoFind(MongoDatabase db, string col, Document spec){
for(int i = 0; i < perTrial; i++){
- ICursor cur = db[col].Find(spec);
+ ICursor<Document> cur = db[col].Find(spec);
foreach(Document d in cur.Documents){
}
}
View
4 MongoDB.GridFS.Tests/MongoDB.GridFS.Tests.csproj
@@ -3,7 +3,7 @@
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
- <ProductVersion>9.0.21022</ProductVersion>
+ <ProductVersion>9.0.30729</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{0C293FE9-F670-4FEF-A60F-20F8C978B1CD}</ProjectGuid>
<OutputType>Library</OutputType>
@@ -59,4 +59,4 @@
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
-</Project>
+</Project>
View
36 MongoDB.GridFS.Tests/Properties/AssemblyInfo.cs
@@ -1,36 +0,0 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("MongoDB.Driver.GridFS.Tests")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("Microsoft")]
-[assembly: AssemblyProduct("MongoDB.Driver.GridFS.Tests")]
-[assembly: AssemblyCopyright("Copyright © Microsoft 2009")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Setting ComVisible to false makes the types in this assembly not visible
-// to COM components. If you need to access a type in this assembly from
-// COM, set the ComVisible attribute to true on that type.
-[assembly: ComVisible(false)]
-
-// The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("94ed0246-0a62-4e43-94fb-bd52a9efc901")]
-
-// Version information for an assembly consists of the following four values:
-//
-// Major Version
-// Minor Version
-// Build Number
-// Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
View
68 MongoDB.GridFS/GridChunk.cs
@@ -1,68 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-using MongoDB.Driver;
-
-namespace MongoDB.Driver.GridFS
-{
- public class GridChunk : IComparable //TODO Change back to a struct
- {
- public GridChunk(object filesId, int n, byte[] data){
-// OidGenerator oidGenerator = new OidGenerator();
-// this.id = oidGenerator.Generate();
- this.filesId = filesId;
- this.n = n;
- this.data = new Binary(data);
- }
-
- public GridChunk(Document doc)
- {
- this.id = (Oid)doc["_id"];
- this.filesId = (Object)doc["files_id"];
- this.n = Convert.ToInt32(doc["n"]);
- this.data = (Binary)doc["data"];
- }
-
- // object id of the chunk in the _chunks collection
- private Object id;
- public Object Id{
- get { return this.id; }
- set { this.id = value; }
- }
- // id value of the owning {{files}} collection entry
- private Object filesId;
- public Object FilesId{
- get { return this.filesId; }
- set { this.filesId = value; }
- }
-
- //Chunk number
- private int n;
- public int N{
- get { return this.n; }
- set { this.n = value; }
- }
-
- private Binary data;
- public Binary Data{
- get { return this.data; }
- }
-
- //Allow sorting by chunk number
- public int CompareTo(Object obj){
- GridChunk chunk = (GridChunk)obj;
- return this.n.CompareTo(chunk.N);
- }
-
- public Document ToDocument()
- {
- Document doc = new Document();
- if(this.id != null) doc["_id"] = this.id;
- doc["files_id"] = this.filesId;
- doc["n"] = this.n;
- doc["data"] = this.data;
- return doc;
- }
-
- }
-}
View
25 MongoDB.GridFS/GridFile.cs
@@ -6,38 +6,41 @@ namespace MongoDB.GridFS
{
public class GridFile{
- private Database db;
+ private IMongoDatabase db;
private string name;
public string Name {
get { return name; }
}
-
- private IMongoCollection files;
- public IMongoCollection Files{
+
+ private IMongoCollection<Document> files;
+ public IMongoCollection<Document> Files
+ {
get { return this.files; }
}
- private IMongoCollection chunks;
- public IMongoCollection Chunks{
+ private IMongoCollection<Document> chunks;
+ public IMongoCollection<Document> Chunks
+ {
get { return this.chunks; }
}
- public GridFile(Database db):this(db,"fs"){}
+ public GridFile(IMongoDatabase db):this(db,"fs"){}
- public GridFile(Database db, string bucket){
+ public GridFile(IMongoDatabase db, string bucket){
this.db = db;
this.files = db[bucket + ".files"];
this.chunks = db[bucket + ".chunks"];
this.chunks.MetaData.CreateIndex(new Document().Append("files_id", 1).Append("n", 1),true);
this.name = bucket;
}
- public ICursor ListFiles(){
+ public ICursor<Document> ListFiles(){
return this.ListFiles(new Document());
}
-
- public ICursor ListFiles(Document query){
+
+ public ICursor<Document> ListFiles(Document query)
+ {
return this.files.Find(new Document().Append("query",query)
.Append("orderby", new Document()
.Append("filename", 1)));
View
8 MongoDB.GridFS/GridFileInfo.cs
@@ -18,7 +18,7 @@ public class GridFileInfo
private const string DEFAULT_CONTENT_TYPE = "text/plain";
private GridFile gridFile;
- private Database db;
+ private IMongoDatabase db;
private string bucket;
@@ -58,7 +58,7 @@ public string FileName
if(filedata["aliases"] is IList<String>){
return (List<String>)filedata["aliases"];
}else{
- return null;
+ return new List<String>();
}
}
set { filedata["aliases"] = value; }
@@ -84,15 +84,15 @@ public string FileName
}
#endregion
- public GridFileInfo(Database db, string bucket, string filename){
+ public GridFileInfo(IMongoDatabase db, string bucket, string filename){
this.db = db;
this.bucket = bucket;
this.gridFile = new GridFile(db,bucket);
SetFileDataDefaults(filename);
if(gridFile.Exists(filename)) this.LoadFileData();
}
- public GridFileInfo(Database db, string filename){
+ public GridFileInfo(MongoDatabase db, string filename){
this.db = db;
this.bucket = "fs";
this.gridFile = new GridFile(db);
View
348 MongoDB.GridFS/GridFileStream.cs
@@ -6,21 +6,21 @@
using MongoDB.Driver;
namespace MongoDB.GridFS
-{
+{
/// <summary>
/// Stream for reading and writing to a file in GridFS.
/// </summary>
/// <remarks>
/// When using the stream for random io it is possible to produce chunks in the begining and middle of the
/// file that are not full size followed by other chunks that are full size. This only affects the md5 sum
- /// that is calculated on the file on close. Because of this do not rely on the md5 sum of a file when doing
+ /// that is calculated on the file on close. Because of this do not rely on the md5 sum of a file when doing
/// random io. Writing to the stream sequentially works fine and will produce a consistent md5.
/// </remarks>
public class GridFileStream : Stream
{
-
- private IMongoCollection files;
- private IMongoCollection chunks;
+
+ private IMongoCollection<Document> files;
+ private IMongoCollection<Document> chunks;
private Document chunk;
private bool chunkDirty;
private long chunkLower = -1;
@@ -32,56 +32,52 @@ public class GridFileStream : Stream
private int highestBuffPosition;
private long highestPosWritten;
-
+
#region Properties
- private GridFileInfo gridFileInfo;
+ private GridFileInfo gridFileInfo;
public GridFileInfo GridFileInfo {
get { return gridFileInfo; }
set { gridFileInfo = value; }
}
-
+
private bool canRead;
public override bool CanRead {
get { return canRead; }
}
-
+
private bool canWrite;
public override bool CanWrite {
get { return canRead; }
}
-
+
public override bool CanSeek {
get { return true; }
}
-
+
public override long Length {
- get {
- return gridFileInfo.Length;
- }
+ get { return gridFileInfo.Length; }
}
private long position;
public override long Position {
- get {
- return position;
- }
- set {
- this.Seek(value, SeekOrigin.Begin);
- }
+ get { return position; }
+ set { this.Seek (value, SeekOrigin.Begin); }
}
#endregion
-
- public GridFileStream(GridFileInfo gridfileinfo,IMongoCollection files, IMongoCollection chunks, FileAccess access){
- switch (access){
- case FileAccess.Read:
- canRead = true;
- break;
- case FileAccess.ReadWrite:
- canRead = true;
- canWrite = true;
- break;
- case FileAccess.Write:
- canWrite = true;
+
+ public GridFileStream (GridFileInfo gridfileinfo, IMongoCollection<Document> files,
+ IMongoCollection<Document> chunks, FileAccess access)
+ {
+ switch (access) {
+ case FileAccess.Read:
+ canRead = true;
+ break;
+ case FileAccess.ReadWrite:
+ canRead = true;
+ canWrite = true;
+ break;
+ case FileAccess.Write:
+ canWrite = true;
break;
}
this.gridFileInfo = gridfileinfo;
@@ -90,53 +86,51 @@ public class GridFileStream : Stream
this.buffer = new byte[gridFileInfo.ChunkSize];
this.blankBuffer = new byte[gridFileInfo.ChunkSize];
this.highestPosWritten = this.gridFileInfo.Length;
- this.MoveTo(0);
+ this.MoveTo (0);
}
/// <summary>
/// Reads data from the stream into the specified array. It will fill the array in starting at offset and
/// adding count bytes returning the number of bytes read from the stream.
/// </summary>
- public override int Read(byte[] array, int offset, int count){
+ public override int Read (byte[] array, int offset, int count)
+ {
int bytesLeftToRead = count;
int bytesRead = 0;
- while(bytesLeftToRead > 0 && this.position < this.Length){
+ while (bytesLeftToRead > 0 && this.position < this.Length) {
int buffAvailable = buffer.Length - buffPosition;
int readCount = 0;
- if(buffAvailable > bytesLeftToRead){
+ if (buffAvailable > bytesLeftToRead) {
readCount = bytesLeftToRead;
- }else{
+ } else {
readCount = buffAvailable;
}
- if(readCount + position > highestPosWritten){
+ if (readCount + position > highestPosWritten) {
//adjust readcount so that we don't read past the end of file.
readCount = readCount - (int)(readCount + position - highestPosWritten);
}
- Array.Copy(buffer,buffPosition,array,offset,readCount);
+ Array.Copy (buffer, buffPosition, array, offset, readCount);
buffPosition += readCount;
bytesLeftToRead -= readCount;
bytesRead += readCount;
offset += readCount;
- MoveTo(position + readCount);
+ MoveTo (position + readCount);
}
return bytesRead;
}
- private void ValidateReadState(byte[] array, int offset, int count){
- if (array == null){
- throw new ArgumentNullException("array", new Exception("array is null"));
- }
- else if (offset < 0){
- throw new ArgumentOutOfRangeException("offset", new Exception("offset is negative"));
- }
- else if (count < 0){
- throw new ArgumentOutOfRangeException("count", new Exception("count is negative"));
- }
- else if ((array.Length - offset) < count){
- throw new MongoGridFSException("Invalid count argument", gridFileInfo.FileName, null);
- }
- else if (!canRead){
- throw new MongoGridFSException("Reading this file is not supported", gridFileInfo.FileName, null);
+ private void ValidateReadState (byte[] array, int offset, int count)
+ {
+ if (array == null) {
+ throw new ArgumentNullException ("array", new Exception ("array is null"));
+ } else if (offset < 0) {
+ throw new ArgumentOutOfRangeException ("offset", new Exception ("offset is negative"));
+ } else if (count < 0) {
+ throw new ArgumentOutOfRangeException ("count", new Exception ("count is negative"));
+ } else if ((array.Length - offset) < count) {
+ throw new MongoGridFSException ("Invalid count argument", gridFileInfo.FileName, null);
+ } else if (!canRead) {
+ throw new MongoGridFSException ("Reading this file is not supported", gridFileInfo.FileName, null);
}
}
@@ -152,64 +146,68 @@ public class GridFileStream : Stream
/// <param name="count">
/// A <see cref="System.Int32"/> The number of bytes from within the source array to copy.
/// </param>
- public override void Write(byte[] array, int offset, int count){
- ValidateWriteState(array,offset,count);
-
+ public override void Write (byte[] array, int offset, int count)
+ {
+ ValidateWriteState (array, offset, count);
+
int bytesLeftToWrite = count;
- while(bytesLeftToWrite > 0){
+ while (bytesLeftToWrite > 0) {
int buffAvailable = buffer.Length - buffPosition;
int writeCount = 0;
- if(buffAvailable > bytesLeftToWrite){
+ if (buffAvailable > bytesLeftToWrite) {
writeCount = bytesLeftToWrite;
- }else{
+ } else {
writeCount = buffAvailable;
}
- Array.Copy(array,offset,buffer,buffPosition,writeCount);
+ Array.Copy (array, offset, buffer, buffPosition, writeCount);
chunkDirty = true;
buffPosition += writeCount;
offset += writeCount;
bytesLeftToWrite -= writeCount;
- MoveTo(position + writeCount);
- highestPosWritten = Math.Max(highestPosWritten, position);
+ MoveTo (position + writeCount);
+ highestPosWritten = Math.Max (highestPosWritten, position);
}
}
-
- private void ValidateWriteState(byte[] array, int offset, int count){
- if (array == null){
- throw new ArgumentNullException("array", new Exception("array is null"));
- }else if (offset < 0){
- throw new ArgumentOutOfRangeException("offset", new Exception("offset is negative"));
- }else if (count < 0){
- throw new ArgumentOutOfRangeException("count",new Exception("count is negative"));
- }else if ((array.Length - offset) < count){
- throw new MongoGridFSException("Invalid count argument", gridFileInfo.FileName, null);
- }else if (!canWrite){
- throw new System.NotSupportedException("Stream does not support writing.");
+
+ private void ValidateWriteState (byte[] array, int offset, int count)
+ {
+ if (array == null) {
+ throw new ArgumentNullException ("array", new Exception ("array is null"));
+ } else if (offset < 0) {
+ throw new ArgumentOutOfRangeException ("offset", new Exception ("offset is negative"));
+ } else if (count < 0) {
+ throw new ArgumentOutOfRangeException ("count", new Exception ("count is negative"));
+ } else if ((array.Length - offset) < count) {
+ throw new MongoGridFSException ("Invalid count argument", gridFileInfo.FileName, null);
+ } else if (!canWrite) {
+ throw new System.NotSupportedException ("Stream does not support writing.");
}
}
-
+
/// <summary>
/// Flushes any changes to current chunk to the database. It can be called in client code at any time or it
/// will automatically be called on Close() and when the stream position moves off the bounds of the current
/// chunk.
/// </summary>
- public override void Flush(){
- if(chunkDirty == false) return;
+ public override void Flush ()
+ {
+ if (chunkDirty == false)
+ return;
//avoid a copy if possible.
- if(highestBuffPosition == buffer.Length){
- chunk["data"] = new Binary(buffer);
- }else{
+ if (highestBuffPosition == buffer.Length) {
+ chunk["data"] = new Binary (buffer);
+ } else {
byte[] data = new byte[highestBuffPosition];
- Array.Copy(buffer,data,highestBuffPosition);
- chunk["data"] = new Binary(data);
+ Array.Copy (buffer, data, highestBuffPosition);
+ chunk["data"] = new Binary (data);
}
- if(chunk.Contains("_id")){
- chunks.Update(chunk);
- }else{
- chunks.Insert(chunk);
+ if (chunk.Contains ("_id")) {
+ chunks.Update (chunk);
+ } else {
+ chunks.Insert (chunk);
}
this.gridFileInfo.Length = highestPosWritten;
}
@@ -219,30 +217,31 @@ public class GridFileStream : Stream
/// location will cause the file to grow to that size. Any holes that may be created from the seek will
/// be zero filled on close.
/// </summary>
- public override long Seek(long offset, SeekOrigin origin){
- if ((origin < SeekOrigin.Begin) || (origin > SeekOrigin.End)){
- throw new ArgumentException("Invalid Seek Origin");
+ public override long Seek (long offset, SeekOrigin origin)
+ {
+ if ((origin < SeekOrigin.Begin) || (origin > SeekOrigin.End)) {
+ throw new ArgumentException ("Invalid Seek Origin");
}
- switch (origin){
- case SeekOrigin.Begin:
- if (offset < 0){
- throw new ArgumentException("Attempted seeking before the begining of the stream");
- }else{
- MoveTo(offset);
- }
- break;
- case SeekOrigin.Current:
- MoveTo(position + offset);
- break;
- case SeekOrigin.End:
- if (offset <= 0){
- throw new ArgumentException("Attempted seeking after the end of the stream");
- }
- MoveTo(this.Length - offset);
- break;
+ switch (origin) {
+ case SeekOrigin.Begin:
+ if (offset < 0) {
+ throw new ArgumentException ("Attempted seeking before the begining of the stream");
+ } else {
+ MoveTo (offset);
+ }
+ break;
+ case SeekOrigin.Current:
+ MoveTo (position + offset);
+ break;
+ case SeekOrigin.End:
+ if (offset <= 0) {
+ throw new ArgumentException ("Attempted seeking after the end of the stream");
+ }
+ MoveTo (this.Length - offset);
+ break;
}
- return position;
+ return position;
}
/// <summary>
@@ -251,79 +250,84 @@ public class GridFileStream : Stream
/// <param name="value">
/// A <see cref="System.Int64"/>
/// </param>
- public override void SetLength(long value){
- if(value < 0) throw new ArgumentOutOfRangeException("length");
- if(this.CanSeek == false || this.CanWrite == false) {
- throw new NotSupportedException("The stream does not support both writing and seeking.");
+ public override void SetLength (long value)
+ {
+ if (value < 0)
+ throw new ArgumentOutOfRangeException ("length");
+ if (this.CanSeek == false || this.CanWrite == false) {
+ throw new NotSupportedException ("The stream does not support both writing and seeking.");
}
-
- if(value < highestPosWritten) {
- TruncateAfter(value);
- }else{
- this.Seek(value, SeekOrigin.Begin);
+
+ if (value < highestPosWritten) {
+ TruncateAfter (value);
+ } else {
+ this.Seek (value, SeekOrigin.Begin);
}
chunkDirty = true;
this.gridFileInfo.Length = value;
highestPosWritten = value;
-
+
}
/// <summary>
/// Close the stream and flush any changes to the database.
/// </summary>
- public override void Close(){
- this.Flush();
+ public override void Close ()
+ {
+ this.Flush ();
this.gridFileInfo.Length = highestPosWritten;
- EnsureNoHoles();
- string md5 = gridFileInfo.CalcMD5();
+ EnsureNoHoles ();
+ string md5 = gridFileInfo.CalcMD5 ();
gridFileInfo.Md5 = md5;
- this.files.Update(gridFileInfo.ToDocument());
- base.Close();
+ this.files.Update (gridFileInfo.ToDocument ());
+ base.Close ();
}
/// <summary>
/// Moves the current position to the new position. If this causes a new chunk to need to be loaded it will take
- /// care of flushing the buffer and loading a new chunk.
+ /// care of flushing the buffer and loading a new chunk.
/// </summary>
/// <param name="position">
/// A <see cref="System.Int32"/> designating where to go to.
/// </param>
- private void MoveTo(long position){
+ private void MoveTo (long position)
+ {
this.position = position;
int chunkSize = this.gridFileInfo.ChunkSize;
bool chunkInRange = (chunk != null && position >= chunkLower && position < chunkUpper);
- if(chunkInRange == false){
- if(chunk != null && chunkDirty){
- highestBuffPosition = Math.Max(highestBuffPosition, buffPosition);
- this.Flush();
+ if (chunkInRange == false) {
+ if (chunk != null && chunkDirty) {
+ highestBuffPosition = Math.Max (highestBuffPosition, buffPosition);
+ this.Flush ();
}
- int chunknum = (int)Math.Floor((double)(position / chunkSize));
- Array.Copy(blankBuffer,buffer,buffer.Length);
- LoadOrCreateChunk(chunknum);
+ int chunknum = (int)Math.Floor ((double)(position / chunkSize));
+ Array.Copy (blankBuffer, buffer, buffer.Length);
+ LoadOrCreateChunk (chunknum);
chunkDirty = false;
chunkLower = chunknum * chunkSize;
chunkUpper = chunkLower + chunkSize;
}
buffPosition = (int)(position % chunkSize);
- highestBuffPosition = Math.Max(highestBuffPosition, buffPosition);
+ highestBuffPosition = Math.Max (highestBuffPosition, buffPosition);
}
/// <summary>
/// Loads a chunk from the chunks collection if it exists. Otherwise it creates a blank chunk Document.
/// </summary>
/// <param name="num"></param>
- private void LoadOrCreateChunk(int num){
+ private void LoadOrCreateChunk (int num)
+ {
Object fid = this.GridFileInfo.Id;
- Document spec = new Document().Append("files_id", fid).Append("n",num);
- chunk = this.chunks.FindOne(spec);
- if(chunk == null) {
+ Document spec = new Document ().Append ("files_id", fid).Append ("n", num);
+ chunk = this.chunks.FindOne (spec);
+ if (chunk == null) {
chunk = spec;
highestBuffPosition = 0;
- }else{
+ } else {
Binary b = (Binary)chunk["data"];
highestBuffPosition = b.Bytes.Length;
- Array.Copy(b.Bytes,buffer, highestBuffPosition);
+ Array.Copy (b.Bytes, buffer, highestBuffPosition);
}
}
@@ -332,58 +336,56 @@ public class GridFileStream : Stream
/// Deletes all chunks after the specified position and clears out any extra bytes if the position doesn't fall on
/// a chunk boundry.
/// </summary>
- private void TruncateAfter(long value){
- int chunknum = CalcChunkNum(value);
- Document spec = new Document().Append("files_id", this.gridFileInfo.Id)
- .Append("n",new Document().Append("$gt",chunknum));
- this.chunks.Delete(spec);
- this.MoveTo(value );
- Array.Copy(blankBuffer,0,buffer,buffPosition, buffer.Length - buffPosition);
+ private void TruncateAfter (long value)
+ {
+ int chunknum = CalcChunkNum (value);
+ Document spec = new Document ().Append ("files_id", this.gridFileInfo.Id).Append ("n", new Document ().Append ("$gt", chunknum));
+ this.chunks.Delete (spec);
+ this.MoveTo (value);
+ Array.Copy (blankBuffer, 0, buffer, buffPosition, buffer.Length - buffPosition);
highestBuffPosition = buffPosition;
}
- private int CalcChunkNum(long position){
+ private int CalcChunkNum (long position)
+ {
int chunkSize = this.gridFileInfo.ChunkSize;
- return (int)Math.Floor((double)(position / chunkSize));
+ return (int)Math.Floor ((double)(position / chunkSize));
}
/// <summary>
/// Makes sure that at least a skelton chunk exists for all numbers. If not the MD5 calculation will fail on a sparse file.
/// </summary>
- private void EnsureNoHoles(){
- int highChunk = CalcChunkNum(this.GridFileInfo.Length);
- Document query = new Document().Append("files_id", this.GridFileInfo.Id)
- .Append("n", new Document()
- .Append("$lte",highChunk));
- Document sort = new Document().Append("n",1);
- Document fields = new Document().Append("_id", 1).Append("n",1);
-
- Binary data = new Binary(this.blankBuffer);
+ private void EnsureNoHoles ()
+ {
+ int highChunk = CalcChunkNum (this.GridFileInfo.Length);
+ Document query = new Document ().Append ("files_id", this.GridFileInfo.Id).Append ("n", new Document ().Append ("$lte", highChunk));
+ Document sort = new Document ().Append ("n", 1);
+ Document fields = new Document ().Append ("_id", 1).Append ("n", 1);
+
+ Binary data = new Binary (this.blankBuffer);
int i = 0;
- using (ICursor cur = chunks.Find(new Document().Append("query",query).Append("sort",sort),0,0,fields)){
- foreach(Document doc in cur.Documents){
- int n = Convert.ToInt32(doc["n"]);
- if(i < n){
- while(i < n){
- chunks.Insert(new Document().Append("files_id", this.gridFileInfo.Id)
- .Append("n", i)
- .Append("data", data)
- );
+ using (ICursor<Document> cur = chunks.Find (new Document ().Append ("query", query).Append ("sort", sort), 0, 0, fields)) {
+ foreach (Document doc in cur.Documents) {
+ int n = Convert.ToInt32 (doc["n"]);
+ if (i < n) {
+ while (i < n) {
+ chunks.Insert (new Document ().Append ("files_id", this.gridFileInfo.Id).Append ("n", i).Append ("data", data));
i++;
}
- }else{
+ } else {
i++;
}
}
}
-
+
}
- protected override void Dispose(bool disposing){
+ protected override void Dispose (bool disposing)
+ {
this.canRead = false;
this.canWrite = false;
- base.Dispose(disposing);
+ base.Dispose (disposing);
}
}
-}
+}
View
4 MongoDB.GridFS/MongoDB.GridFS.csproj
@@ -3,7 +3,7 @@
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
- <ProductVersion>9.0.21022</ProductVersion>
+ <ProductVersion>9.0.30729</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{B42DBBF9-0A1F-4749-9787-013BF8D8F435}</ProjectGuid>
<OutputType>Library</OutputType>
@@ -46,4 +46,4 @@
</ProjectReference>
</ItemGroup>
<Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
-</Project>
+</Project>
View
36 MongoDB.GridFS/Properties/AssemblyInfo.cs
@@ -1,36 +0,0 @@
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
-
-// General Information about an assembly is controlled through the following
-// set of attributes. Change these attribute values to modify the information
-// associated with an assembly.
-[assembly: AssemblyTitle("MongoDB.Driver.GridFS")]
-[assembly: AssemblyDescription("")]
-[assembly: AssemblyConfiguration("")]
-[assembly: AssemblyCompany("Microsoft")]
-[assembly: AssemblyProduct("MongoDB.Driver.GridFS")]
-[assembly: AssemblyCopyright("Copyright © Microsoft 2009")]
-[assembly: AssemblyTrademark("")]
-[assembly: AssemblyCulture("")]
-
-// Setting ComVisible to false makes the types in this assembly not visible
-// to COM components. If you need to access a type in this assembly from
-// COM, set the ComVisible attribute to true on that type.
-[assembly: ComVisible(false)]
-
-// The following GUID is for the ID of the typelib if this project is exposed to COM
-[assembly: Guid("05f4c345-88fc-4e22-87c8-4e1292da6faf")]
-
-// Version information for an assembly consists of the following four values:
-//
-// Major Version
-// Minor Version
-// Build Number
-// Revision
-//
-// You can specify all the values or you can default the Build and Revision Numbers
-// by using the '*' as shown below:
-// [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("1.0.0.0")]
-[assembly: AssemblyFileVersion("1.0.0.0")]
View
8 MongoDB.Linq.Tests/TestMongoDocumentQuerySyntax.cs
@@ -11,14 +11,14 @@ namespace MongoDB.Linq.Tests {
public class TestMongoDocumentQuerySyntax {
private IMongoQuery queryable;
- private Mock<IMongoCollection> collectionMock;
- private Mock<ICursor> cursorMock;
+ private Mock<IMongoCollection<Document>> collectionMock;
+ private Mock<ICursor<Document>> cursorMock;
[SetUp]
public void Setup() {
Debug.WriteLine("initializing queryable");
- collectionMock = new Mock<IMongoCollection>();
- cursorMock = new Mock<ICursor>();
+ collectionMock = new Mock<IMongoCollection<Document>>();
+ cursorMock = new Mock<ICursor<Document>>();
collectionMock.Setup(c => c.Find(It.IsAny<Document>(), It.IsAny<int>(), It.IsAny<int>(), It.IsAny<Document>())).Returns(cursorMock.Object);
queryable = new MongoQuery(new MongoQueryProvider(collectionMock.Object));
}
View
8 MongoDB.Linq.Tests/TestQueryParsing.cs
@@ -11,14 +11,14 @@ namespace MongoDB.Linq.Tests {
public class TestQueryParsing {
private IMongoQuery queryable;
- private Mock<IMongoCollection> collectionMock;
- private Mock<ICursor> cursorMock;
+ private Mock<IMongoCollection<Document>> collectionMock;
+ private Mock<ICursor<Document>> cursorMock;
[SetUp]
public void Setup() {
Debug.WriteLine("initializing queryable");
- collectionMock = new Mock<IMongoCollection>();
- cursorMock = new Mock<ICursor>();
+ collectionMock = new Mock<IMongoCollection<Document>>();
+ cursorMock = new Mock<ICursor<Document>>();
collectionMock.Setup(c => c.Find(It.IsAny<Document>(), It.IsAny<int>(), It.IsAny<int>(), It.IsAny<Document>())).Returns(cursorMock.Object);
queryable = new MongoQuery(new MongoQueryProvider(collectionMock.Object));
}
View
2,301 MongoDB.Linq.Tests/test-results/MongoDB.Linq.Tests.csproj-Debug-2009-10-12.xml
0 additions, 2,301 deletions not shown because the diff is too large. Please use a local Git client to view these changes.
View
2 MongoDB.Linq/MongoLinqEx.cs
@@ -5,7 +5,7 @@ namespace MongoDB.Linq
{
public static class MongoLinqEx
{
- public static IMongoQuery AsQueryable<T>(this T collection) where T : IMongoCollection
+ public static IMongoQuery AsQueryable<T>(this T collection) where T : IMongoCollection<Document>
{
return new MongoQuery(new MongoQueryProvider(collection));
}
View
5 MongoDB.Linq/MongoQueryProvider.cs
@@ -13,9 +13,10 @@ private struct Result {
public bool IsFirstCall;
}
- private readonly IMongoCollection collection;
+ private readonly IMongoCollection<Document> collection;
- public MongoQueryProvider(IMongoCollection collection) {
+ public MongoQueryProvider(IMongoCollection<Document> collection)
+ {
this.collection = collection;
}
View
2 MongoDB.Net-Tests/Bson/TestBsonBinary.cs
@@ -65,4 +65,4 @@ protected static byte[] DecodeHex (string val)
}
}
-}
+}
View
17 MongoDB.Net-Tests/Bson/TestBsonReader.cs
@@ -1,4 +1,4 @@
-using System;
+using System;
using System.IO;
using System.Text;
@@ -52,7 +52,6 @@ public class TestBsonReader
public void TestReadStringDblByteCharOnEndOfBufferBoundry(){
StringBuilder sb = new StringBuilder();
sb.Append(pound, 66); //puts a pound symbol at the end of the buffer boundary but not broken.
-
string expected = sb.ToString();
Assert.AreEqual(expected, WriteAndReadString(expected));
}
@@ -160,10 +159,10 @@ public class TestBsonReader
BinaryWriter w = new BinaryWriter(ms);
int byteCount = bs.CalculateSize(val,false);
w.Write(byteCount);
- bs.WriteString(val);
+ bs.Write(val,false);
ms.Seek(0,SeekOrigin.Begin);
BsonReader reader = new BsonReader(ms);
- return reader.ReadLenString();
+ return reader.ReadLengthString();
}
@@ -173,7 +172,7 @@ public class TestBsonReader
MemoryStream ms = new MemoryStream(buf);
BsonReader reader = new BsonReader(ms);
- Document doc = reader.ReadDocument();
+ Document doc = (Document)reader.ReadObject();
Assert.IsNotNull(doc);
}
@@ -197,7 +196,7 @@ public class TestBsonReader
MemoryStream ms = new MemoryStream(buf);
BsonReader reader = new BsonReader(ms);
- Document doc = reader.ReadDocument();
+ Document doc = (Document)reader.ReadObject();
Assert.IsNotNull(doc, "Document was null");
Assert.IsTrue(doc.Contains("_id"));
@@ -216,7 +215,7 @@ public class TestBsonReader
MemoryStream ms = new MemoryStream(buf);
BsonReader reader = new BsonReader(ms);
- Document doc = reader.ReadDocument();
+ Document doc = (Document)reader.ReadObject();
Assert.IsNotNull(doc, "Document was null");
Assert.AreEqual(buf.Length, reader.Position);
Assert.IsTrue(doc.Contains("a"));
@@ -244,7 +243,7 @@ public class TestBsonReader
.Append("minkey", MongoMinKey.Value)
.Append("maxkey", MongoMaxKey.Value)
;
- writer.Write(expected);
+ writer.WriteObject(expected);
writer.Flush();
ms.Seek(0,SeekOrigin.Begin);
@@ -258,7 +257,7 @@ public class TestBsonReader
MemoryStream ms = new MemoryStream();
BsonWriter writer = new BsonWriter(ms);
- writer.Write(doc);
+ writer.WriteObject(doc);
return BitConverter.ToString(ms.ToArray()).Replace("-","");
}
View
14 MongoDB.Net-Tests/Bson/TestBsonWriter.cs
@@ -18,7 +18,7 @@ public class TestBsonWriter
MemoryStream ms = new MemoryStream();
BsonWriter writer = new BsonWriter(ms);
- Assert.AreEqual(5,writer.CalculateSize(doc));
+ Assert.AreEqual(5,writer.CalculateSizeObject(doc));
}
[Test]
@@ -31,7 +31,7 @@ public class TestBsonWriter
BsonWriter writer = new BsonWriter(ms);
//BsonDocument bdoc = BsonConvert.From(doc);
- Assert.AreEqual(21,writer.CalculateSize(doc));
+ Assert.AreEqual(21,writer.CalculateSizeObject(doc));
}
[Test]
@@ -44,15 +44,15 @@ public class TestBsonWriter
MemoryStream ms = new MemoryStream();
BsonWriter writer = new BsonWriter(ms);
- Assert.AreEqual(51,writer.CalculateSize(doc));
+ Assert.AreEqual(51,writer.CalculateSizeObject(doc));
}
[Test]
public void TestWriteString(){
MemoryStream ms = new MemoryStream();
BsonWriter writer = new BsonWriter(ms);
string expected = "54-65-73-74-73-2E-69-6E-73-65-72-74-73-00";
- writer.WriteString("Tests.inserts");
+ writer.Write("Tests.inserts",false);
string hexdump = BitConverter.ToString(ms.ToArray());
@@ -84,7 +84,7 @@ public class TestBsonWriter
private string WriteStringAndGetHex(string val){
MemoryStream ms = new MemoryStream();
BsonWriter writer = new BsonWriter(ms);
- writer.WriteString(val);
+ writer.Write(val,false);
return BitConverter.ToString(ms.ToArray());
}
@@ -95,7 +95,7 @@ public class TestBsonWriter
string expected = "1400000002746573740005000000746573740000";
Document doc = new Document().Append("test", "test");
- writer.Write(doc);
+ writer.WriteObject(doc);
string hexdump = BitConverter.ToString(ms.ToArray());
hexdump = hexdump.Replace("-","");
@@ -123,7 +123,7 @@ public class TestBsonWriter
BsonWriter writer = new BsonWriter(ms);
Document doc = new Document().Append("n", null);
try{
- writer.Write(doc);
+ writer.WriteObject(doc);
}catch(NullReferenceException){
Assert.Fail("Null Reference Exception was thrown on trying to serialize a null value");
}
View
2 MongoDB.Net-Tests/Bson/TestRoundTrips.cs
@@ -150,7 +150,7 @@ public class TestRoundTrips
MemoryStream ms = new MemoryStream();
BsonWriter writer = new BsonWriter(ms);
- writer.Write(source);
+ writer.WriteObject(source);
writer.Flush();
ms.Seek(0, SeekOrigin.Begin);
View
12 MongoDB.Net-Tests/Connections/TestConnection.cs
@@ -15,7 +15,7 @@ public class TestConnection
Connections.Connection conn = ConnectionFactory.GetConnection(string.Empty);
conn.Open();
- QueryMessage qmsg = generateQueryMessage();
+ var qmsg = generateQueryMessage();
conn.SendTwoWayMessage(qmsg);
conn.Close();
@@ -28,14 +28,14 @@ public class TestConnection
WriteBadMessage(conn);
try{
- QueryMessage qmsg = generateQueryMessage();
+ var qmsg = generateQueryMessage();
conn.SendTwoWayMessage(qmsg);
}catch(IOException){
//Should be able to resend.
Assert.IsTrue(conn.State == ConnectionState.Opened);
- QueryMessage qmsg = generateQueryMessage();
- ReplyMessage rmsg = conn.SendTwoWayMessage(qmsg);
+ var qmsg = generateQueryMessage();
+ ReplyMessage<Document> rmsg = conn.SendTwoWayMessage(qmsg);
Assert.IsNotNull(rmsg);
}
@@ -54,11 +54,11 @@ public class TestConnection
writer.Write((byte)0);
}
- protected QueryMessage generateQueryMessage(){
+ protected QueryMessage<Document> generateQueryMessage(){
Document qdoc = new Document();
qdoc.Add("listDatabases", 1.0);
//QueryMessage qmsg = new QueryMessage(qdoc,"system.namespaces");
- QueryMessage qmsg = new QueryMessage(qdoc,"admin.$cmd");
+ var qmsg = new QueryMessage<Document>(qdoc,"admin.$cmd");
qmsg.NumberToReturn = -1;
return qmsg;
View
4 MongoDB.Net-Tests/IO/TestQueryMessage.cs
@@ -16,7 +16,7 @@ public void TestAllBytesWritten()
Document query = new Document();
query.Add("col1", 1);
- QueryMessage msg = new QueryMessage(query,"TestDB.TestCol");
+ var msg = new QueryMessage<Document>(query,"TestDB.TestCol");
MemoryStream buffer = new MemoryStream();
msg.Write(buffer);
@@ -35,7 +35,7 @@ public void TestAllBytesWritten()
Document query = new Document();
query.Add("col1", 1);
- QueryMessage msg = new QueryMessage(query,"TestDB.TestCol");
+ var msg = new QueryMessage<Document>(query,"TestDB.TestCol");
MemoryStream buffer = new MemoryStream();
msg.Write(buffer);
View
6 MongoDB.Net-Tests/MongoDB.Driver.Tests.csproj
@@ -81,6 +81,10 @@
<Compile Include="Connections\TestConnectionFactory.cs" />
<Compile Include="Connections\TestPooledConnectionFactory.cs" />
<Compile Include="Connections\TestSimpleConnectionFactory.cs" />
+ <Compile Include="Serialization\ArrayFactoryTests.cs" />
+ <Compile Include="Serialization\ReflectionBuilderTests.cs" />
+ <Compile Include="Serialization\RelectionDescriptorTests.cs" />
+ <Compile Include="Serialization\SerializationTestBase.cs" />
<Compile Include="TestAuthentication.cs" />
<Compile Include="Bson\TestBsonReader.cs" />
<Compile Include="IO\TestQueryMessage.cs" />
@@ -103,7 +107,7 @@
<Compile Include="TestOid.cs" />
<Compile Include="TestOidGenerator.cs" />
<Compile Include="Bson\TestBsonWriter.cs" />
- <Compile Include="TestDatabaseJS.cs" />
+ <Compile Include="TestDatabaseJavascript.cs" />
<Compile Include="TestMapReduce.cs" />
<Compile Include="TestMapReduceBuilder.cs" />
<Compile Include="TestConcurrency.cs" />
View
2 MongoDB.Net-Tests/MongoTestBase.cs
@@ -9,7 +9,7 @@ namespace MongoDB.Driver
public abstract class MongoTestBase
{
public Mongo Mongo{get;set;}
- public Database DB{
+ public IMongoDatabase DB{
get{
return this.Mongo["tests"];
}
View
129 MongoDB.Net-Tests/Serialization/ArrayFactoryTests.cs
@@ -0,0 +1,129 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using NUnit.Framework;
+
+namespace MongoDB.Driver.Serialization
+{
+ [TestFixture]
+ public class ArrayFactoryTests
+ {
+ readonly ArrayFactory _factory = new ArrayFactory();
+
+ [Test]
+ [ExpectedException(typeof(MongoException))]
+ public void CanNotCreateNonIEnumerableObjects(){
+ Type containingType;
+ _factory.Create(typeof(object), out containingType);
+ }
+
+ [Test]
+ [ExpectedException(typeof(MongoException))]
+ public void CanNotCreateObjectsForAnyInterface()
+ {
+ Type containingType;
+ _factory.Create(typeof(IDisposable), out containingType);
+ }
+
+ [Test]
+ [ExpectedException(typeof(MongoException))]
+ public void CatchExceptionsWhileCreatingArrayAndThrowMongoExceptionWithTypename()
+ {
+ Type containingType;
+ _factory.Create(typeof(Array), out containingType);
+ }
+
+ [Test]
+ public void CanCreateArrayOfInt(){
+ Type containingType;
+ var instance = _factory.Create(typeof(int[]), out containingType);
+ Assert.IsInstanceOfType(typeof(int[]), instance);
+ Assert.AreEqual(typeof(int), containingType);
+ }
+
+ [Test]
+ public void CanCreateArrayList(){
+ Type containingType;
+ var instance = _factory.Create(typeof(ArrayList), out containingType);
+ Assert.IsInstanceOfType(typeof(ArrayList),instance);
+ Assert.AreEqual(typeof(object),containingType);
+ }
+
+ [Test]
+ public void CanCreateListOfInt(){
+ Type containingType;
+ var instance = _factory.Create(typeof(List<int>), out containingType);
+ Assert.IsInstanceOfType(typeof(List<int>), instance);
+ Assert.AreEqual(typeof(int), containingType);
+ }
+
+ [Test]
+ public void CanCreateListForIEnumerable(){
+ Type containingType;
+ var instance = _factory.Create(typeof(IEnumerable), out containingType);
+ Assert.IsInstanceOfType(typeof(List<object>), instance);
+ Assert.AreEqual(typeof(object), containingType);
+ }
+
+ [Test]
+ public void CanCreateListForIEnumerableOfInt(){
+ Type containingType;
+ var instance = _factory.Create(typeof(IEnumerable<int>), out containingType);
+ Assert.IsInstanceOfType(typeof(List<int>), instance);
+ Assert.AreEqual(typeof(int), containingType);
+ }
+
+ [Test]
+ public void CanCreateListForICollection(){
+ Type containingType;
+ var instance = _factory.Create(typeof(ICollection), out containingType);
+ Assert.IsInstanceOfType(typeof(List<object>), instance);
+ Assert.AreEqual(typeof(object), containingType);
+ }
+
+ [Test]
+ public void CanCreateListForICollectionOfType()
+ {
+ Type containingType;
+ var instance = _factory.Create(typeof(ICollection<int>), out containingType);
+ Assert.IsInstanceOfType(typeof(List<int>), instance);
+ Assert.AreEqual(typeof(int), containingType);
+ }
+
+ [Test]
+ public void CanCreateQueue()
+ {
+ Type containingType;
+ var instance = _factory.Create(typeof(Queue), out containingType);
+ Assert.IsInstanceOfType(typeof(Queue), instance);
+ Assert.AreEqual(typeof(object), containingType);
+ }
+
+ [Test]
+ public void CanCreateQueueOfInt()
+ {
+ Type containingType;
+ var instance = _factory.Create(typeof(Queue<int>), out containingType);
+ Assert.IsInstanceOfType(typeof(Queue<int>), instance);
+ Assert.AreEqual(typeof(int), containingType);
+ }
+
+ [Test]
+ public void CanCreateStack()
+ {
+ Type containingType;
+ var instance = _factory.Create(typeof(Stack), out containingType);
+ Assert.IsInstanceOfType(typeof(Stack), instance);
+ Assert.AreEqual(typeof(object), containingType);
+ }
+
+ [Test]
+ public void CanCreateStackOfInt()
+ {
+ Type containingType;
+ var instance = _factory.Create(typeof(Stack<int>), out containingType);
+ Assert.IsInstanceOfType(typeof(Stack<int>), instance);
+ Assert.AreEqual(typeof(int), containingType);
+ }
+ }
+}
View
38 MongoDB.Net-Tests/Serialization/ReflectionBuilderTests.cs
@@ -0,0 +1,38 @@
+using NUnit.Framework;
+
+namespace MongoDB.Driver.Serialization
+{
+ [TestFixture]
+ public class ReflectionBuilderTests : SerializationTestBase
+ {
+ [Test]
+ public void CanSerializeASimpleObject(){
+ var bson = Serialize(new{A = "a", B = "b", C = new{D = "d"}});
+ Assert.AreEqual("KAAAAAJBAAIAAABhAAJCAAIAAABiAANDAA4AAAACRAACAAAAZAAAAA==", bson);
+ }
+
+ [Test]
+ public void CanSerializeAnSimpleArray(){
+ var bson = Serialize(new{A = new []{1,2}});
+ Assert.AreEqual("GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA", bson);
+ }
+
+ [Test]
+ public void CanSerializeAnObjectArray(){
+ var bson = Serialize(new { A = new object[] { new { B = "b" }, new { C = "c" } } });
+ Assert.AreEqual("LwAAAARBACcAAAADMAAOAAAAAkIAAgAAAGIAAAMxAA4AAAACQwACAAAAYwAAAAA=", bson);
+ }
+
+ [Test]
+ public void CanSerializeAnDocumentPreperty(){
+ var bson = Serialize(new{A = new Document().Append("B", "b")});
+ Assert.AreEqual("FgAAAANBAA4AAAACQgACAAAAYgAAAA==", bson);
+ }
+
+ [Test]
+ public void CanSerializeAnDocument(){
+ var bson = Serialize(new Document().Append("A","a"));
+ Assert.AreEqual("DgAAAAJBAAIAAABhAAA=", bson);
+ }
+ }
+}
View
163 MongoDB.Net-Tests/Serialization/RelectionDescriptorTests.cs
@@ -0,0 +1,163 @@
+using System.Collections;
+using System.Collections.Generic;
+using NUnit.Framework;
+
+namespace MongoDB.Driver.Serialization
+{
+ [TestFixture]
+ public class RelectionDescriptorTests : SerializationTestBase
+ {
+ public class SimpleObject
+ {
+ public string A { get; set; }
+ public string B { get; set; }
+ public SimpleObjectC C { get; set; }
+ }
+ public class SimpleObjectC
+ {
+ public string D { get; set; }
+ }
+
+ [Test]
+ public void CanDeserializeASimpleObject(){
+ const string bson = "KAAAAAJBAAIAAABhAAJCAAIAAABiAANDAA4AAAACRAACAAAAZAAAAA==";
+ var simpleObject = Deserialize<SimpleObject>(bson);
+ Assert.IsNotNull(simpleObject);
+ Assert.AreEqual("a", simpleObject.A);
+ Assert.AreEqual("b",simpleObject.B);
+ Assert.IsNotNull(simpleObject.C);
+ Assert.AreEqual("d",simpleObject.C.D);
+ }
+
+ public class SimpleArrayAsList
+ {
+ public List<object> A { get; set; }
+ }
+
+ [Test]
+ public void CanDeserializeAnSimpleArrayAsList(){
+ const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA";
+ var simpleArray = Deserialize<SimpleArrayAsList>(bson);
+ Assert.IsNotNull(simpleArray);
+ Assert.IsNotNull(simpleArray.A);
+ Assert.AreEqual(2, simpleArray.A.Count);
+ Assert.Contains(1, simpleArray.A);
+ Assert.Contains(2, simpleArray.A);
+ }
+
+ public class SimpleArrayAsListOfInt
+ {
+ public List<int> A { get; set; }
+ }
+
+ [Test]
+ public void CanDeserializeAnSimpleArrayAsListOfInt()
+ {
+ const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA";
+ var simpleArray = Deserialize<SimpleArrayAsListOfInt>(bson);
+ Assert.IsNotNull(simpleArray);
+ Assert.IsNotNull(simpleArray.A);
+ Assert.AreEqual(2, simpleArray.A.Count);
+ Assert.Contains(1, simpleArray.A);
+ Assert.Contains(2, simpleArray.A);
+ }
+
+ public class SimpleArrayAsArrayList
+ {
+ public ArrayList A { get; set; }
+ }
+
+ [Test]
+ public void CanDeserializeAnSimpleArrayAsArrayList()
+ {
+ const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA";
+ var simpleArray = Deserialize<SimpleArrayAsArrayList>(bson);
+ Assert.IsNotNull(simpleArray);
+ Assert.IsNotNull(simpleArray.A);
+ Assert.AreEqual(2, simpleArray.A.Count);
+ Assert.Contains(1, simpleArray.A);
+ Assert.Contains(2, simpleArray.A);
+ }
+
+ public class SimpleArrayAsIEnumerable
+ {
+ public IEnumerable A { get; set; }
+ }
+
+ [Test]
+ public void CanDeserializeAnSimpleArrayAsIEnumerable()
+ {
+ const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA";
+ var simpleArray = Deserialize<SimpleArrayAsIEnumerable>(bson);
+ Assert.IsNotNull(simpleArray);
+ Assert.IsNotNull(simpleArray.A);
+ var list = new List<object>();
+ foreach(var value in simpleArray.A)
+ list.Add(value);
+ Assert.AreEqual(2, list.Count);
+ Assert.Contains(1, list);
+ Assert.Contains(2, list);
+ }
+
+ public class SimpleArrayAsIEnumerableOfInt
+ {
+ public IEnumerable<int> A { get; set; }
+ }
+
+ [Test]
+ public void CanDeserializeAnSimpleArrayAsIEnumerableOfInt(){
+ const string bson = "GwAAAARBABMAAAAQMAABAAAAEDEAAgAAAAAA";
+ var simpleArray = Deserialize<SimpleArrayAsIEnumerableOfInt>(bson);
+ Assert.IsNotNull(simpleArray);
+ Assert.IsNotNull(simpleArray.A);
+ var list = new List<int>(simpleArray.A);
+ Assert.AreEqual(2, list.Count);
+ Assert.Contains(1, list);
+ Assert.Contains(2, list);
+ }
+
+ public class ObjectArray
+ {
+ public IList<ObjectArrayA> A { get; set; }
+ }
+
+ public class ObjectArrayA
+ {
+ public string B { get; set; }
+ }
+
+ [Test]
+ public void CanDeserializeAnObjectArray(){
+ const string bson = "HgAAAARBABYAAAADMAAOAAAAAkIAAgAAAGIAAAAA";
+ var objectArray = Deserialize<ObjectArray>(bson);
+ Assert.IsNotNull(objectArray);
+ Assert.IsNotNull(objectArray.A);
+ Assert.AreEqual(1,objectArray.A.Count);
+ Assert.IsNotNull(objectArray.A[0].B);
+ Assert.AreEqual("b", objectArray.A[0].B);
+ }
+
+ public class DocumentProperty
+ {
+ public Document A { get; set; }
+ }
+
+ [Test]
+ public void CanDeserializeAnDocumentPreperty(){
+ const string bson = "FgAAAANBAA4AAAACQgACAAAAYgAAAA==";
+ var documentProperty = Deserialize<DocumentProperty>(bson);
+ Assert.IsNotNull(documentProperty);
+ Assert.IsNotNull(documentProperty.A);
+ Assert.AreEqual("b",documentProperty.A["B"]);
+ }
+
+ [Test]
+ public void CanSerializeAnDocument(){
+ const string bson = "DgAAAAJBAAIAAABhAAA=";
+ var document = Deserialize<Document>(bson);
+ Assert.IsNotNull(document);
+ Assert.AreEqual(1,document.Count);
+ Assert.AreEqual("a",document["A"]);
+ }
+ }
+}
View
49 MongoDB.Net-Tests/Serialization/SerializationTestBase.cs
@@ -0,0 +1,49 @@
+using System;
+using System.IO;
+using MongoDB.Driver.Bson;
+
+namespace MongoDB.Driver.Serialization
+{
+ public abstract class SerializationTestBase
+ {
+ protected string Serialize(Document document)
+ {
+ using(var mem = new MemoryStream())
+ {
+ var writer = new BsonWriter(mem, new DocumentDescriptor());
+ writer.WriteObject(document);
+ writer.Flush();
+ return Convert.ToBase64String(mem.ToArray());
+ }
+ }
+
+ protected string Serialize(object instance)
+ {
+ using(var mem = new MemoryStream())
+ {
+ var writer = new BsonWriter(mem, new ReflectionDescriptor());
+ writer.WriteObject(instance);
+ writer.Flush();
+ return Convert.ToBase64String(mem.ToArray());
+ }
+ }
+
+ protected T Deserialize<T>(string base64)
+ {
+ using(var mem = new MemoryStream(Convert.FromBase64String(base64)))
+ {
+ var reader = new BsonReader(mem, new ReflectionBuilder<T>());
+ return (T)reader.ReadObject();
+ }
+ }
+
+ protected Document DeserializeDocument(string base64)
+ {
+ using(var mem = new MemoryStream(Convert.FromBase64String(base64)))
+ {
+ var reader = new BsonReader(mem);
+ return (Document)reader.ReadObject();
+ }
+ }
+ }
+}
View
49 MongoDB.Net-Tests/TestCollection.cs
@@ -18,14 +18,14 @@ public class TestCollection : MongoTestBase
}
public override void OnInit (){
- IMongoCollection finds = DB["finds"];
+ IMongoCollection<Document> finds = DB["finds"];
for(int j = 1; j < 100; j++){
finds.Insert(new Document(){{"x", 4},{"h", "hi"},{"j", j}});
}
for(int j = 100; j < 105; j++){
finds.Insert(new Document(){{"x", 4},{"n", 1},{"j", j}});
}
- IMongoCollection charreads = DB["charreads"];
+ IMongoCollection<Document> charreads = DB["charreads"];
charreads.Insert(new Document(){{"test", "1234" + pound + "56"}});
}
@@ -64,7 +64,7 @@ public class TestCollection : MongoTestBase
Document fields = new Document();
fields["x"] = 1;
- ICursor c = DB["finds"].Find(query,-1,0,fields);
+ ICursor<Document> c = DB["finds"].Find(query, -1, 0, fields);
foreach(Document result in c.Documents){
Assert.IsNotNull(result);
Assert.AreEqual(4, result["x"]);
@@ -77,7 +77,7 @@ public class TestCollection : MongoTestBase
Document query = new Document();
query["j"] = new Document().Append("$gt",20);
- ICursor c = DB["finds"].Find(query);
+ ICursor<Document> c = DB["finds"].Find(query);
foreach(Document result in c.Documents){
Assert.IsNotNull(result);
Object j = result["j"];
@@ -88,7 +88,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestManualWhere(){
Document query = new Document().Append("$where", new Code("this.j % 2 == 0"));
- ICursor c = DB["finds"].Find(query);
+ ICursor<Document> c = DB["finds"].Find(query);
foreach(Document result in c.Documents){
Assert.IsNotNull(result);
Object j = result["j"];
@@ -97,7 +97,7 @@ public class TestCollection : MongoTestBase
}
[Test]
public void TestFindWhereEquivalency(){
- IMongoCollection col = DB["finds"];
+ IMongoCollection<Document> col = DB["finds"];
Document lt = new Document().Append("j", new Document().Append("$lt", 5));
string where = "this.j < 5";
Document explicitWhere = new Document().Append("$where", new Code(where));
@@ -110,7 +110,8 @@ public class TestCollection : MongoTestBase
Assert.AreEqual(4, CountDocs(col.Find(funcDoc)), "Function where didn't return 4 docs");
}
- private int CountDocs(ICursor cur){
+ private int CountDocs(ICursor<Document> cur)
+ {
int cnt = 0;
foreach(Document doc in cur.Documents){
cnt++;
@@ -119,7 +120,7 @@ public class TestCollection : MongoTestBase
}
[Test]
public void TestWhere(){
- ICursor c = DB["finds"].Find("this.j % 2 == 0");
+ ICursor<Document> c = DB["finds"].Find("this.j % 2 == 0");
foreach(Document result in c.Documents){
Assert.IsNotNull(result);
Object j = result["j"];
@@ -138,7 +139,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestSimpleInsert(){
- IMongoCollection inserts = DB["inserts"];
+ IMongoCollection<Document> inserts = DB["inserts"];
Document indoc = new Document();
indoc["song"] = "Palmdale";
indoc["artist"] = "Afroman";
@@ -153,7 +154,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestReallySimpleInsert(){
- IMongoCollection inserts = DB["inserts"];
+ IMongoCollection<Document> inserts = DB["inserts"];
Document indoc = new Document();
indoc["y"] = 1;
indoc["x"] = 2;
@@ -166,7 +167,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestPoundSymbolInsert(){
- IMongoCollection inserts = DB["inserts"];
+ IMongoCollection<Document> inserts = DB["inserts"];
Document indoc = new Document().Append("x","1234" + pound + "56").Append("y",1);;
inserts.Insert(indoc);
@@ -177,7 +178,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestArrayInsert(){
- IMongoCollection inserts = DB["inserts"];
+ IMongoCollection<Document> inserts = DB["inserts"];
Document indoc1 = new Document();
indoc1["song"] = "The Axe";
indoc1["artist"] = "Tinsley Ellis";
@@ -202,7 +203,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestInsertOfArray(){
OidGenerator ogen = new OidGenerator();
- IMongoCollection inserts = DB["inserts"];
+ IMongoCollection<Document> inserts = DB["inserts"];
Document album = new Document();
album["_id"] = ogen.Generate();
album["artist"] = "Popa Chubby";
@@ -226,7 +227,7 @@ public class TestCollection : MongoTestBase
public void TestInsertLargerThan4MBDocument(){
Binary b = new Binary(new byte[1024 * 1024]);
Document big = new Document(){{"name", "Big Document"}, {"b1", b}, {"b2", b}, {"b3", b}, {"b4", b}};
- IMongoCollection inserts = DB["inserts"];
+ IMongoCollection<Document> inserts = DB["inserts"];
bool thrown = false;
try{
inserts.Insert(big);
@@ -241,7 +242,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestInsertBulkLargerThan4MBOfDocuments(){
Binary b = new Binary(new byte[1024 * 1024 * 2]);
- IMongoCollection inserts = DB["inserts"];
+ IMongoCollection<Document> inserts = DB["inserts"];
try{
Document[] docs = new Document[10];
//6MB+ of documents
@@ -258,7 +259,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestDelete(){
- IMongoCollection deletes = DB["deletes"];
+ IMongoCollection<Document> deletes = DB["deletes"];
Document doc = new Document();
doc["y"] = 1;
doc["x"] = 2;
@@ -278,7 +279,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestUpdateUpsertNotExisting(){
- IMongoCollection updates = DB["updates"];
+ IMongoCollection<Document> updates = DB["updates"];
Document doc = new Document();
doc["First"] = "Sam";
doc["Last"] = "CorderNE";
@@ -292,7 +293,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestUpdateUpsertExisting(){
- IMongoCollection updates = DB["updates"];
+ IMongoCollection<Document> updates = DB["updates"];
Document doc = new Document();
doc["First"] = "Mtt";
doc["Last"] = "Brewer";
@@ -316,14 +317,14 @@ public class TestCollection : MongoTestBase
[Test]
public void TestUpdateMany(){
- IMongoCollection updates = DB["updates"];
+ IMongoCollection<Document> updates = DB["updates"];
updates.Insert(new Document().Append("Last", "Cordr").Append("First","Sam"));
updates.Insert(new Document().Append("Last", "Cordr").Append("First","Sam2"));
updates.Insert(new Document().Append("Last", "Cordr").Append("First","Sam3"));
Document selector = new Document().Append("Last", "Cordr");
- ICursor results = updates.Find(selector);
+ ICursor<Document> results = updates.Find(selector);
bool found = false;
foreach(Document doc in results.Documents){
Assert.AreEqual("Cordr", doc["Last"]);
@@ -351,7 +352,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestUpdatePartial(){
- IMongoCollection updates = DB["updates"];
+ IMongoCollection<Document> updates = DB["updates"];
int coolness = 5;
Document einstein = new Document(){{"Last", "Einstien"},{"First", "Albert"},{"Coolness",coolness++}};
updates.Insert(einstein);
@@ -367,7 +368,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestCount(){
- IMongoCollection counts = DB["counts"];
+ IMongoCollection<Document> counts = DB["counts"];
int top = 100;
for(int i = 0; i < top; i++){
counts.Insert(new Document().Append("Last", "Cordr").Append("First","Sam").Append("cnt", i));
@@ -378,7 +379,7 @@ public class TestCollection : MongoTestBase
[Test]
public void TestCountWithSpec(){
- IMongoCollection counts = DB["counts_spec"];
+ IMongoCollection<Document> counts = DB["counts_spec"];
counts.Insert(new Document().Append("Last", "Cordr").Append("First","Sam").Append("cnt", 1));
counts.Insert(new Document().Append("Last", "Cordr").Append("First","Sam").Append("cnt", 2));