-
Notifications
You must be signed in to change notification settings - Fork 16
/
MongoPersistenceSpecificTests.cs
322 lines (269 loc) · 12.4 KB
/
MongoPersistenceSpecificTests.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Bson.Serialization.Options;
using MongoDB.Driver;
using MongoDB.Driver.Core.Events;
using NStore.Core.Persistence;
using NStore.Persistence.Tests;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace NStore.Persistence.Mongo.Tests
{
/// <summary>
/// Chunk variant carrying extra persisted properties, used by the tests below
/// to verify that custom fields round-trip through the Mongo persistence.
/// </summary>
public class CustomChunk : MongoChunk
{
    public DateTime CreateAt { get; private set; }

    // Stored as array-of-arrays so dictionary keys may contain dots (e.g. "test.1"),
    // which the default document representation would reject.
    [BsonDictionaryOptions(DictionaryRepresentation.ArrayOfArrays)]
    public IDictionary<string, string> CustomHeaders { get; set; } =
        new Dictionary<string, string>();

    public CustomChunk()
    {
        CreateAt = new DateTime(2017, 1, 1, 10, 12, 13).ToUniversalTime();
        CustomHeaders["test.1"] = "a";
    }
}
/// <summary>
/// Pass-through payload serializer that records every payload handed to it,
/// so tests can assert how often — and with which values — each hook ran.
/// </summary>
public class TestMongoPayloadSerializer : IMongoPayloadSerializer
{
    public List<Object> SerializedPayloads { get; set; } = new List<object>();
    public List<Object> DeserializedPayloads { get; set; } = new List<object>();

    public object Serialize(object payload)
    {
        // Record and return the payload untouched.
        SerializedPayloads.Add(payload);
        return payload;
    }

    public object Deserialize(object payload)
    {
        // Record and return the payload untouched.
        DeserializedPayloads.Add(payload);
        return payload;
    }
}
public class mongo_persistence_with_custom_chunk_type : BasePersistenceTest
{
    protected override IMongoPersistence CreatePersistence(MongoPersistenceOptions options)
    {
        // Persist chunks as CustomChunk so the extra properties travel with each record.
        return new MongoPersistence<CustomChunk>(options);
    }

    [Fact]
    public async Task can_write_custom_data()
    {
        var appended = (CustomChunk)await Store.AppendAsync("a", 1, "data");

        // Bypass the store and read the raw document back to prove the
        // custom fields were actually written to Mongo.
        var collection = GetCollection<CustomChunk>();
        var stored = await (await collection.FindAsync(FilterDefinition<CustomChunk>.Empty)).FirstAsync();

        Assert.Equal("a", stored.CustomHeaders["test.1"]);
        Assert.Equal(appended.CreateAt, stored.CreateAt);
    }

    [Fact]
    public async Task can_read_custom_data()
    {
        var appended = (CustomChunk)await Store.AppendAsync("a", 1, "data");

        // Read through the store API and verify the custom fields survive the trip.
        var reloaded = (CustomChunk)await Store.ReadSingleBackwardAsync("a");

        Assert.Equal("a", reloaded.CustomHeaders["test.1"]);
        Assert.Equal(appended.CreateAt, reloaded.CreateAt);
    }
}
public class empty_payload_serialization : BasePersistenceTest
{
    /// <summary>
    /// Pass-through serializer that only counts how many times each hook runs.
    /// </summary>
    public class SerializerSpy : IMongoPayloadSerializer
    {
        public int SerializeCount { get; private set; }
        public int DeserializeCount { get; private set; }

        public object Serialize(object payload)
        {
            SerializeCount++;
            return payload;
        }

        public object Deserialize(object payload)
        {
            DeserializeCount++;
            return payload;
        }
    }

    private SerializerSpy _spy;

    protected override IMongoPersistence CreatePersistence(MongoPersistenceOptions options)
    {
        // Install the spy so the test below can inspect invocation counts.
        _spy = new SerializerSpy();
        options.MongoPayloadSerializer = _spy;
        return new MongoPersistence(options);
    }

    [Fact]
    public async Task empty_payload_should_be_serialized()
    {
        await Store.AppendAsync("a", 1, "payload").ConfigureAwait(false);

        // Re-appending the same index must fail with a duplicate-index error.
        await Assert.ThrowsAsync<DuplicateStreamIndexException>(
            () => Store.AppendAsync("a", 1, "payload")
        ).ConfigureAwait(false);

        // Expected serializer invocations:
        //   1 — the successful first append
        //   2 — the conflicting second append
        //   3 — the empty chunk written after the failure
        Assert.Equal(3, _spy.SerializeCount);
    }
}
public class filler_tests : BasePersistenceTest
{
    /// <summary>
    /// An idempotent (duplicate operation id) append returns null, and the filler
    /// writes a placeholder chunk whose operation id is regenerated ("_2").
    /// </summary>
    [Fact]
    public async Task filler_should_regenerate_operation_id()
    {
        // First append claims operation id "op1" on the partition.
        await Store.AppendAsync("::empty", 1, "payload", "op1").ConfigureAwait(false);

        // FIX: CancellationTokenSource owns a timer when constructed with a delay;
        // dispose it so the timer is released instead of leaking until finalization.
        using (var cts = new CancellationTokenSource(2000))
        {
            // Same operation id => idempotent append: no chunk is returned.
            var result = await Store.AppendAsync("::empty", 2, "payload", "op1", cts.Token).ConfigureAwait(false);
            Assert.Null(result);
        }

        var recorder = new Recorder();
        await Store.ReadAllAsync(0, recorder, 100).ConfigureAwait(false);

        // The filler chunk must carry a regenerated operation id, not "op1".
        Assert.Collection(recorder.Chunks,
            c => Assert.Equal("op1", c.OperationId),
            c => Assert.Equal("_2", c.OperationId)
        );
    }
}
public class When_using_custom_payload_serializer : BasePersistenceTest
{
    private TestMongoPayloadSerializer _testMongoPayloadSerializer;

    protected internal override MongoPersistenceOptions GetMongoPersistenceOptions()
    {
        // Add your custom payload serializer; reuse a single instance across calls
        // so the recorded payload lists survive if the options are rebuilt.
        var options = base.GetMongoPersistenceOptions();
        if (_testMongoPayloadSerializer == null)
        {
            _testMongoPayloadSerializer = new TestMongoPayloadSerializer();
        }
        options.MongoPayloadSerializer = _testMongoPayloadSerializer;
        return options;
    }

    [Fact]
    public async Task Verify_payload_serializer_is_called_for_basic_append_and_read()
    {
        // Write to a simple stream and verify that indeed the wrapping is happening.
        await Store.AppendAsync("test1", 1, "CHUNK1").ConfigureAwait(false);
        Assert.Equal("CHUNK1", _testMongoPayloadSerializer.SerializedPayloads.Single());

        // Read from the stream: the payload must pass through Deserialize exactly once.
        await Store.GetAllChunksForAPartition("test1").ConfigureAwait(false);
        Assert.Equal("CHUNK1", _testMongoPayloadSerializer.DeserializedPayloads.Single());
    }

    [Fact]
    public async Task Verify_payload_serializer_is_called_for_backward_read()
    {
        await Store.AppendAsync("test1", 1, "CHUNK1").ConfigureAwait(false);
        await Store.ReadSingleBackwardAsync("test1").ConfigureAwait(false);
        Assert.Equal("CHUNK1", _testMongoPayloadSerializer.SerializedPayloads.Single());

        // Verify that reading a non existent partition does not throw and does
        // not invoke the serializer (DeserializedPayloads is unchanged).
        var result = await Store.ReadSingleBackwardAsync("non-existent").ConfigureAwait(false);
        Assert.Equal("CHUNK1", _testMongoPayloadSerializer.DeserializedPayloads.Single());
        // This is useful to verify that null can be returned without throwing any null exception.
        Assert.Null(result);

        // Now use another api, for backward reading.
        await Store.ReadBackwardAsync("test1", long.MaxValue, EmptySubscription).ConfigureAwait(false);
        Assert.Equal(new List<object>() { "CHUNK1", "CHUNK1" }, _testMongoPayloadSerializer.DeserializedPayloads);

        await Store.ReadBackwardAsync("non-existent", long.MaxValue, EmptySubscription).ConfigureAwait(false);
        Assert.Equal(new List<object>() { "CHUNK1", "CHUNK1" }, _testMongoPayloadSerializer.DeserializedPayloads);
    }

    [Fact]
    public async Task Verify_payload_serializer_is_called_for_read_by_operation_async()
    {
        var operationId = Guid.NewGuid().ToString();
        await Store.AppendAsync("test1", 1, "CHUNK1", operationId).ConfigureAwait(false);

        await Store.ReadByOperationIdAsync("test1", operationId, CancellationToken.None).ConfigureAwait(false);
        Assert.Equal("CHUNK1", _testMongoPayloadSerializer.SerializedPayloads.Single());
    }

    [Fact]
    public async Task Verify_payload_serializer_is_called_for_read_all_by_operation_async()
    {
        var operationId = Guid.NewGuid().ToString();
        await Store.AppendAsync("test1", 1, "CHUNK1", operationId).ConfigureAwait(false);
        // Remember to use different partition id or the second operation will be idempotent.
        await Store.AppendAsync("test3", 1, "CHUNK2", operationId).ConfigureAwait(false);

        // Now read all by operation id, we should have two deserializations.
        // FIX: added the ConfigureAwait(false) that every other await in this fixture uses.
        await Store.ReadAllByOperationIdAsync(operationId, EmptySubscription).ConfigureAwait(false);
        Assert.Equal(new List<object>() { "CHUNK1", "CHUNK2" }, _testMongoPayloadSerializer.DeserializedPayloads);
    }
}
public class Can_intercept_mongo_query_with_options : BasePersistenceTest
{
    // Incremented from MongoDB driver event callbacks, which run on
    // driver-owned threads — see the Interlocked note below.
    private Int32 callCount;

    protected internal override MongoPersistenceOptions GetMongoPersistenceOptions()
    {
        var options = base.GetMongoPersistenceOptions();
        options.CustomizePartitionClientSettings = mongoClientSettings =>
            mongoClientSettings.ClusterConfigurator = clusterConfigurator =>
            {
                // FIX: cluster events are raised on driver threads; a bare callCount++
                // is not atomic there and could lose increments. Use Interlocked.
                clusterConfigurator.Subscribe<CommandSucceededEvent>(_ => Interlocked.Increment(ref callCount));
            };
        return options;
    }

    [Fact]
    public async Task Verify_that_after_append_async_we_have_intercepted_the_call()
    {
        callCount = 0;
        // A single append should produce exactly one successful command.
        await Store.AppendAsync("test1", 1, "CHUNK1").ConfigureAwait(false);
        Assert.Equal(1, callCount);
    }
}
public abstract class insert_id_already_existing_base : BasePersistenceTest
{
    protected internal override MongoPersistenceOptions GetMongoPersistenceOptions()
    {
        var options = base.GetMongoPersistenceOptions();
        options.UseLocalSequence = GetUseLocalSequence();
        options.SequenceCollectionName = "sequence_test";
        return options;
    }

    /// <summary>
    /// Template hook: concrete fixtures choose local vs db-backed sequence generation.
    /// </summary>
    protected abstract bool GetUseLocalSequence();

    [Fact]
    public async Task resilient_to_multiple_persistence_write_concurrently()
    {
        // A second, independent persistence instance sharing the same database.
        IPersistence secondStore = Create(false);

        var firstPartition = Guid.NewGuid().ToString();
        var otherPartition = Guid.NewGuid().ToString();

        await Store.AppendAsync(firstPartition, 1, new { data = "first attempt" }).ConfigureAwait(false);

        // Now the second store inserts a chunk, taking the next global position.
        await secondStore.AppendAsync(otherPartition, 1, new { data = "first attempt" }).ConfigureAwait(false);

        // The first store must skip past the position claimed by the second store.
        var chunk = await Store.AppendAsync(firstPartition, 2, new { data = "second data" }).ConfigureAwait(false);
        Assert.Equal(3, chunk.Position);
    }
}
public class insert_id_already_existing_base_local_sequence : insert_id_already_existing_base
{
    /// <summary>Run the concurrency tests with the in-process (local) sequence generator.</summary>
    protected override bool GetUseLocalSequence() => true;
}
public class insert_id_already_existing_base_db_sequence : insert_id_already_existing_base
{
    /// <summary>Run the concurrency tests with the database-backed sequence generator.</summary>
    protected override bool GetUseLocalSequence() => false;
}
/// <summary>
/// Verifies the sequence seed is initialized correctly when the database-backed
/// sequence generator is used (UseLocalSequence = false).
/// </summary>
public class Sequence_generator_id_is_initialized_correctly : BasePersistenceTest
{
    private MongoPersistenceOptions _options;

    protected internal override MongoPersistenceOptions GetMongoPersistenceOptions()
    {
        _options = base.GetMongoPersistenceOptions();
        _options.UseLocalSequence = false;
        _options.SequenceCollectionName = "sequence_test";
        return _options;
    }

    [Fact]
    public void Verify_that_after_persistence_initialization_sequence_collection_is_populated()
    {
        // Connect directly to the database to inspect the raw sequence document
        // and be sure the record was correctly created.
        var url = new MongoUrl(_options.PartitionsConnectionString);
        var database = new MongoClient(url).GetDatabase(url.DatabaseName);
        var sequences = database.GetCollection<BsonDocument>(_options.SequenceCollectionName);

        var sequenceDocument = sequences.AsQueryable().SingleOrDefault();

        Assert.NotNull(sequenceDocument);
        Assert.Equal("streams", sequenceDocument["_id"].AsString);
        Assert.Equal(0L, sequenceDocument["LastValue"].AsInt64);
    }
}
}