Skip to content

Commit

Permalink
Fix for #157
Browse files Browse the repository at this point in the history
- mongodb.filter support with or without "o." prefix
  • Loading branch information
richardwilly98 committed Oct 11, 2013
1 parent acef547 commit f2c7920
Show file tree
Hide file tree
Showing 6 changed files with 250 additions and 16 deletions.
Expand Up @@ -154,7 +154,7 @@ public void start() {
logger.info(
"starting mongodb stream. options: secondaryreadpreference [{}], drop_collection [{}], include_collection [{}], throttlesize [{}], gridfs [{}], filter [{}], db [{}], collection [{}], script [{}], indexing to [{}]/[{}]",
definition.isMongoSecondaryReadPreference(), definition.isDropCollection(), definition.getIncludeCollection(),
definition.getThrottleSize(), definition.isMongoGridFS(), definition.getMongoFilter(), definition.getMongoDb(),
definition.getThrottleSize(), definition.isMongoGridFS(), definition.getMongoOplogFilter(), definition.getMongoDb(),
definition.getMongoCollection(), definition.getScript(), definition.getIndexName(), definition.getTypeName());

// Create the index if it does not exist
Expand Down
Expand Up @@ -16,6 +16,7 @@
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;

import org.bson.BasicBSONObject;
import org.bson.types.BSONTimestamp;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.collect.Maps;
Expand All @@ -27,9 +28,12 @@
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.MongoClientOptions;
import com.mongodb.ReadPreference;
import com.mongodb.ServerAddress;
import com.mongodb.util.JSON;

public class MongoDBRiverDefinition {

Expand Down Expand Up @@ -86,7 +90,8 @@ public class MongoDBRiverDefinition {
private final String mongoDb;
private final String mongoCollection;
private final boolean mongoGridFS;
private final String mongoFilter;
private final BasicDBObject mongoOplogFilter;
private final BasicDBObject mongoCollectionFilter;
// mongodb.credentials
private final String mongoAdminUser;
private final String mongoAdminPassword;
Expand Down Expand Up @@ -127,7 +132,8 @@ public static class Builder {
private String mongoDb;
private String mongoCollection;
private boolean mongoGridFS;
private String mongoFilter = "";
private BasicDBObject mongoOplogFilter = new BasicDBObject();
private BasicDBObject mongoCollectionFilter = new BasicDBObject();
// mongodb.credentials
private String mongoAdminUser = "";
private String mongoAdminPassword = "";
Expand Down Expand Up @@ -187,8 +193,13 @@ public Builder mongoGridFS(boolean mongoGridFS) {
return this;
}

public Builder mongoFilter(String mongoFilter) {
this.mongoFilter = mongoFilter;
/**
 * Sets the filter applied to oplog entries; keys are expected to carry the
 * "o." document prefix (see {@code addPrefix} in {@code parseSettings}).
 *
 * @param mongoOplogFilter filter matched against oplog documents
 * @return this builder for chaining
 */
public Builder mongoOplogFilter(BasicDBObject mongoOplogFilter) {
this.mongoOplogFilter = mongoOplogFilter;
return this;
}

/**
 * Sets the filter applied directly to the collection (keys without the
 * "o." oplog prefix), used for the initial import query.
 *
 * @param mongoCollectionFilter filter matched against collection documents
 * @return this builder for chaining
 */
public Builder mongoCollectionFilter(BasicDBObject mongoCollectionFilter) {
this.mongoCollectionFilter = mongoCollectionFilter;
return this;
}

Expand Down Expand Up @@ -516,9 +527,13 @@ public synchronized static MongoDBRiverDefinition parseSettings(String riverName
builder.mongoCollection(XContentMapValues.nodeStringValue(mongoSettings.get(COLLECTION_FIELD), riverName));
builder.mongoGridFS(XContentMapValues.nodeBooleanValue(mongoSettings.get(GRIDFS_FIELD), false));
if (mongoSettings.containsKey(FILTER_FIELD)) {
builder.mongoFilter(XContentMapValues.nodeStringValue(mongoSettings.get(FILTER_FIELD), ""));
} else {
builder.mongoFilter("");
String filter = XContentMapValues.nodeStringValue(mongoSettings.get(FILTER_FIELD), "");
filter = removePrefix("o.", filter);
builder.mongoCollectionFilter(convertToBasicDBObject(filter));
// DBObject bsonObject = (DBObject) JSON.parse(filter);
builder.mongoOplogFilter(convertToBasicDBObject(addPrefix("o.", filter)));
// } else {
// builder.mongoOplogFilter("");
}

if (mongoSettings.containsKey(SCRIPT_FIELD)) {
Expand Down Expand Up @@ -596,6 +611,50 @@ public void checkClientTrusted(X509Certificate[] chain, String authType) throws
return SSLSocketFactory.getDefault();
}

/**
 * Parses a JSON document string into a {@link BasicDBObject}.
 *
 * @param object JSON document text; may be {@code null} or empty
 * @return the parsed object, or an empty {@link BasicDBObject} when the
 *         input is {@code null} or empty
 */
static BasicDBObject convertToBasicDBObject(String object) {
    if (object == null || object.isEmpty()) {
        return new BasicDBObject();
    }
    // JSON.parse returns a BasicDBObject for a top-level JSON document.
    return (BasicDBObject) JSON.parse(object);
}

/**
 * Strips {@code prefix} from every top-level key of the JSON document in
 * {@code object}; keys without the prefix are kept unchanged.
 */
static String removePrefix(String prefix, String object) {
return addRemovePrefix(prefix, object, false);
}

/**
 * Prepends {@code prefix} to every top-level key of the JSON document in
 * {@code object}.
 */
static String addPrefix(String prefix, String object) {
return addRemovePrefix(prefix, object, true);
}

/**
 * Rewrites every top-level key of the JSON document in {@code object},
 * either prepending {@code prefix} or stripping it, and returns the
 * result as JSON text. Nested keys are not touched.
 *
 * @param prefix key prefix to add or strip (e.g. "o.")
 * @param object JSON document text; an empty string yields an empty string
 * @param add {@code true} to prepend the prefix, {@code false} to strip it
 * @return the rewritten JSON document as a string
 * @throws IllegalArgumentException if {@code prefix} is {@code null}
 * @throws NullPointerException if {@code object} is {@code null}
 */
static String addRemovePrefix(String prefix, String object, boolean add) {
    if (prefix == null) {
        throw new IllegalArgumentException("prefix cannot be null");
    }
    if (object == null) {
        throw new NullPointerException("object cannot be null");
    }
    if (object.isEmpty()) {
        return "";
    }
    DBObject bsonObject = (DBObject) JSON.parse(object);

    BasicBSONObject newObject = new BasicBSONObject();
    for (String key : bsonObject.keySet()) {
        if (add) {
            newObject.put(prefix + key, bsonObject.get(key));
        } else if (key.startsWith(prefix)) {
            newObject.put(key.substring(prefix.length()), bsonObject.get(key));
        } else {
            // Key has no prefix to strip; pass it through unchanged so the
            // operation is idempotent.
            newObject.put(key, bsonObject.get(key));
        }
    }
    return newObject.toString();
}

private MongoDBRiverDefinition(final Builder builder) {
// river
this.riverName = builder.riverName;
Expand All @@ -607,7 +666,8 @@ private MongoDBRiverDefinition(final Builder builder) {
this.mongoDb = builder.mongoDb;
this.mongoCollection = builder.mongoCollection;
this.mongoGridFS = builder.mongoGridFS;
this.mongoFilter = builder.mongoFilter;
this.mongoOplogFilter = builder.mongoOplogFilter;
this.mongoCollectionFilter = builder.mongoCollectionFilter;
// mongodb.credentials
this.mongoAdminUser = builder.mongoAdminUser;
this.mongoAdminPassword = builder.mongoAdminPassword;
Expand Down Expand Up @@ -664,8 +724,12 @@ public boolean isMongoGridFS() {
return mongoGridFS;
}

public String getMongoFilter() {
return mongoFilter;
/**
 * Filter matched against oplog entries; keys carry the "o." prefix added
 * when the river settings were parsed.
 */
public BasicDBObject getMongoOplogFilter() {
return mongoOplogFilter;
}

/**
 * Filter matched directly against collection documents (no "o." prefix),
 * used for the initial import query.
 */
public BasicDBObject getMongoCollectionFilter() {
return mongoCollectionFilter;
}

public String getMongoAdminUser() {
Expand Down
9 changes: 4 additions & 5 deletions src/main/java/org/elasticsearch/river/mongodb/Slurper.java
Expand Up @@ -30,7 +30,6 @@
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import com.mongodb.gridfs.GridFSFile;
import com.mongodb.util.JSON;

class Slurper implements Runnable {

Expand Down Expand Up @@ -130,7 +129,7 @@ protected boolean isIndexEmpty() {
* Does an initial sync the same way MongoDB does.
* https://groups.google.com/
* forum/?fromgroups=#!topic/mongodb-user/sOKlhD_E2ns
*
*
* @return the last oplog timestamp before the import began
* @throws InterruptedException
* if the blocking queue stream is interrupted while waiting
Expand All @@ -142,7 +141,7 @@ protected BSONTimestamp doInitialImport() throws InterruptedException {
DBCursor cursor = null;
try {
if (!definition.isMongoGridFS()) {
cursor = slurpedCollection.find();
cursor = slurpedCollection.find(definition.getMongoCollectionFilter());
while (cursor.hasNext()) {
DBObject object = cursor.next();
addToStream(MongoDBRiver.OPLOG_INSERT_OPERATION, null, applyFieldFilter(object));
Expand Down Expand Up @@ -352,7 +351,7 @@ private DBObject getOplogFilter(final BSONTimestamp time) {
+ MongoDBRiver.OPLOG_NAMESPACE_COMMAND));
filter.put(MongoDBRiver.MONGODB_OR_OPERATOR, values2);
}
if (!definition.getMongoFilter().isEmpty()) {
if (definition.getMongoOplogFilter().size() > 0) {
filter.putAll(getMongoFilter());
}
if (logger.isDebugEnabled()) {
Expand All @@ -376,7 +375,7 @@ private DBObject getMongoFilter() {
filters2.add(new BasicDBObject(MongoDBRiver.MONGODB_OR_OPERATOR, filters3));

// include custom filter in filters2
filters2.add((DBObject) JSON.parse(definition.getMongoFilter()));
filters2.add(definition.getMongoOplogFilter());

filters.add(new BasicDBObject(MongoDBRiver.MONGODB_AND_OPERATOR, filters2));

Expand Down
Expand Up @@ -11,7 +11,9 @@
import org.testng.Assert;
import org.testng.annotations.Test;

import com.mongodb.BasicDBObject;
import com.mongodb.ServerAddress;
import com.mongodb.util.JSON;

public class MongoDBRiverDefinitionTest {

Expand Down Expand Up @@ -67,4 +69,28 @@ public void testLoadMongoDBRiverDefinitionIssue159() {
Assert.fail("testLoadMongoDBRiverDefinitionIssue159 failed", t);
}
}

@Test
public void parseFilter() {
    String originalFilter = "{\"o.lang\":\"de\"}";
    BasicDBObject originalBson = (BasicDBObject) JSON.parse(originalFilter);

    // Stripping the "o." prefix must still yield a parseable document.
    String stripped = MongoDBRiverDefinition.removePrefix("o.", originalFilter);
    Assert.assertNotNull(stripped);

    BasicDBObject strippedBson = (BasicDBObject) JSON.parse(stripped);
    Assert.assertNotNull(strippedBson);

    // removePrefix is idempotent: a second pass changes nothing.
    String strippedAgain = MongoDBRiverDefinition.removePrefix("o.", strippedBson.toString());
    Assert.assertNotNull(strippedAgain);
    BasicDBObject strippedAgainBson = (BasicDBObject) JSON.parse(strippedAgain);
    Assert.assertEquals(strippedBson, strippedAgainBson);

    // addPrefix reverses removePrefix, restoring the original document.
    String restored = MongoDBRiverDefinition.addPrefix("o.", stripped);
    BasicDBObject restoredBson = (BasicDBObject) JSON.parse(restored);
    Assert.assertNotNull(restoredBson);
    Assert.assertEquals(originalBson, restoredBson);
}

}
@@ -0,0 +1,121 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.river.mongodb.simple;

import static org.elasticsearch.client.Requests.countRequest;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.river.mongodb.RiverMongoDBTestAbstract;
import org.testng.Assert;
import org.testng.annotations.Test;

import com.google.common.collect.ImmutableMap;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.WriteConcern;
import com.mongodb.WriteResult;

/**
 * Integration test for issue #157: the "mongodb.filter" river setting must
 * select documents whether its keys are written with or without the "o."
 * oplog prefix.
 */
@Test
public class RiverMongoCollectionFilterTest extends RiverMongoDBTestAbstract {

    private static final String TEST_SIMPLE_MONGODB_RIVER_COLLECTION_FILTER_JSON = "/org/elasticsearch/river/mongodb/simple/test-simple-mongodb-river-collection-filter.json";
    private DB mongoDB;
    private DBCollection mongoCollection;
    // The same logical filter expressed both ways; each must index exactly one document.
    private Object collectionFilterWithPrefix = "{'o.lang':'de'}";
    private Object collectionFilterNoPrefix = "{'lang':'de'}";

    protected RiverMongoCollectionFilterTest() {
        super("testmongodb-" + System.currentTimeMillis(), "testriver-" + System.currentTimeMillis(), "person-"
                + System.currentTimeMillis(), "personindex-" + System.currentTimeMillis());
    }

    @Test
    public void collectionFilterWithPrefixTest() throws Throwable {
        collectionFilterTest(collectionFilterWithPrefix);
    }

    @Test
    public void collectionFilterNoPrefixTest() throws Throwable {
        collectionFilterTest(collectionFilterNoPrefix);
    }

    /**
     * Inserts one matching ("de") and one non-matching ("fr") document,
     * starts a river with the given filter, and expects exactly one indexed
     * document.
     */
    private void collectionFilterTest(Object filter) throws Throwable {
        logger.debug("Start CollectionFilter");
        try {
            createDatabase();

            DBObject dbObject1 = new BasicDBObject(ImmutableMap.of("name", "Bernd", "lang", "de"));
            WriteResult result1 = mongoCollection.insert(dbObject1);
            logger.info("WriteResult: {}", result1.toString());
            dbObject1 = new BasicDBObject(ImmutableMap.of("name", "Richard", "lang", "fr"));
            result1 = mongoCollection.insert(dbObject1);
            logger.info("WriteResult: {}", result1.toString());
            Thread.sleep(wait);

            createRiver(filter);
            Thread.sleep(wait);

            ActionFuture<IndicesExistsResponse> response = getNode().client().admin().indices()
                    .exists(new IndicesExistsRequest(getIndex()));
            assertThat(response.actionGet().isExists(), equalTo(true));
            refreshIndex();
            // Only the "de" document passes the filter.
            assertThat(getNode().client().count(countRequest(getIndex())).actionGet().getCount(), equalTo(1L));

            deleteRiver();
        } catch (Throwable t) {
            // logger.error already records the stack trace; rethrow so TestNG fails the test.
            logger.error("CollectionFilter failed.", t);
            throw t;
        } finally {
            cleanUp();
        }
    }

    private void createDatabase() {
        logger.debug("createDatabase {}", getDatabase());
        try {
            mongoDB = getMongo().getDB(getDatabase());
            mongoDB.setWriteConcern(WriteConcern.REPLICAS_SAFE);
            logger.info("Start createCollection");
            mongoCollection = mongoDB.createCollection(getCollection(), null);
            Assert.assertNotNull(mongoCollection);
        } catch (Throwable t) {
            // NOTE(review): a failure here is only logged, leaving
            // mongoCollection null for later use — consider rethrowing.
            logger.error("createDatabase failed.", t);
        }
    }

    private void createRiver(Object filter) throws Exception {
        super.createRiver(TEST_SIMPLE_MONGODB_RIVER_COLLECTION_FILTER_JSON, getRiver(), (Object) String.valueOf(getMongoPort1()),
                (Object) String.valueOf(getMongoPort2()), (Object) String.valueOf(getMongoPort3()), (Object) getDatabase(),
                (Object) getCollection(), filter, (Object) getIndex(), (Object) getDatabase());
    }

    private void cleanUp() {
        super.deleteRiver();
        logger.info("Drop database " + mongoDB.getName());
        mongoDB.dropDatabase();
    }

}
@@ -0,0 +1,24 @@
{
"type": "mongodb",
"mongodb": {
"servers": [{
"host": "localhost",
"port": %s
},
{
"host": "localhost",
"port": %s
},
{
"host": "localhost",
"port": %s
}],
"db": "%s",
"collection": "%s",
"filter": "%s"
},
"index": {
"name": "%s",
"throttle_size": 2000
}
}

0 comments on commit f2c7920

Please sign in to comment.