Skip to content

Commit

Permalink
Merge pull request #152 from benmccann/admin-repl-info
Browse files Browse the repository at this point in the history
Add replication info to admin page and check whether index exists before initial import
  • Loading branch information
richardwilly98 committed Oct 2, 2013
2 parents 81fdfbb + 256140a commit f6252e7
Show file tree
Hide file tree
Showing 16 changed files with 361 additions and 328 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,19 +20,25 @@
import org.elasticsearch.rest.XContentRestResponse;
import org.elasticsearch.rest.XContentThrowableRestResponse;
import org.elasticsearch.rest.action.support.RestXContentBuilder;
import org.elasticsearch.river.RiverIndexName;
import org.elasticsearch.river.RiverSettings;
import org.elasticsearch.river.mongodb.MongoDBRiver;
import org.elasticsearch.river.mongodb.MongoDBRiverDefinition;
import org.elasticsearch.river.mongodb.Status;
import org.elasticsearch.river.mongodb.util.MongoDBRiverHelper;
import org.elasticsearch.search.SearchHit;

public class RestMongoDBRiverAction extends BaseRestHandler {

private static final String BASE_URL = "/_river/" + MongoDBRiver.TYPE;
private final String riverIndexName;

@Inject
public RestMongoDBRiverAction(Settings settings, Client client, RestController controller) {
public RestMongoDBRiverAction(Settings settings, Client client, RestController controller, @RiverIndexName String riverIndexName) {
super(settings, client);
controller.registerHandler(RestRequest.Method.GET, BASE_URL + "/{action}", this);
controller.registerHandler(RestRequest.Method.POST, BASE_URL + "/{river}/{action}", this);
this.riverIndexName = riverIndexName;
String baseUrl = "/" + riverIndexName + "/" + MongoDBRiver.TYPE;
controller.registerHandler(RestRequest.Method.GET, baseUrl + "/{action}", this);
controller.registerHandler(RestRequest.Method.POST, baseUrl + "/{river}/{action}", this);
}

@Override
Expand Down Expand Up @@ -61,7 +67,7 @@ private void start(RestRequest request, RestChannel channel) {
respondError(request, channel, "Parameter 'river' is required", RestStatus.BAD_REQUEST);
return;
}
MongoDBRiverHelper.setRiverEnabled(client, river, true);
MongoDBRiverHelper.setRiverStatus(client, river, Status.RUNNING);
respondSuccess(request, channel, RestStatus.OK);
}

Expand All @@ -71,7 +77,7 @@ private void stop(RestRequest request, RestChannel channel) {
respondError(request, channel, "Parameter 'river' is required", RestStatus.BAD_REQUEST);
return;
}
MongoDBRiverHelper.setRiverEnabled(client, river, false);
MongoDBRiverHelper.setRiverStatus(client, river, Status.STOPPED);
respondSuccess(request, channel, RestStatus.OK);
}

Expand Down Expand Up @@ -120,16 +126,23 @@ private void errorResponse(RestRequest request, RestChannel channel, Throwable e
}

private List<Map<String, Object>> getRivers() {
SearchResponse searchResponse = client.prepareSearch("_river").setQuery(new FieldQueryBuilder("type", "mongodb")).execute()
.actionGet();
SearchResponse searchResponse = client.prepareSearch("_river")
.setQuery(new FieldQueryBuilder("type", "mongodb"))
.execute().actionGet();
long totalHits = searchResponse.getHits().totalHits();
logger.trace("totalHits: {}", totalHits);
List<Map<String, Object>> rivers = new ArrayList<Map<String, Object>>();
for (SearchHit hit : searchResponse.getHits().hits()) {
Map<String, Object> source = new HashMap<String, Object>();
source.put("name", hit.getType());
source.put("enabled", MongoDBRiverHelper.isRiverEnabled(client, hit.getType()));
String riverName = hit.getType();
RiverSettings riverSettings = new RiverSettings(null, hit.getSource());
MongoDBRiverDefinition definition = MongoDBRiverDefinition.parseSettings(riverName, riverIndexName, riverSettings, null);

source.put("name", riverName);
source.put("status", MongoDBRiverHelper.getRiverStatus(client, hit.getType()));
source.put("settings", hit.getSource());
source.put("lastTimestamp", MongoDBRiver.getLastTimestamp(client, definition));
source.put("indexCount", MongoDBRiver.getIndexCount(client, definition));
logger.trace("source: {}", hit.getSourceAsString());
rivers.add(source);
}
Expand Down
2 changes: 1 addition & 1 deletion src/main/java/org/elasticsearch/river/mongodb/Indexer.java
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ public Indexer(MongoDBRiverDefinition definition, SharedContext context, Client

@Override
public void run() {
while (context.isActive()) {
while (context.getStatus() == Status.RUNNING) {
sw = new StopWatch().start();
deletedDocuments = 0;
insertedDocuments = 0;
Expand Down
27 changes: 18 additions & 9 deletions src/main/java/org/elasticsearch/river/mongodb/MongoDBRiver.java
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
import org.bson.types.BSONTimestamp;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.block.ClusterBlockException;
Expand Down Expand Up @@ -74,8 +75,8 @@ public class MongoDBRiver extends AbstractRiverComponent implements River {

public final static String TYPE = "mongodb";
public final static String NAME = "mongodb-river";
public final static String STATUS = "_mongodbstatus";
public final static String ENABLED = "enabled";
public final static String STATUS_ID = "_riverstatus";
public final static String STATUS_FIELD = "status";
public final static String DESCRIPTION = "MongoDB River Plugin";
public final static String LAST_TIMESTAMP_FIELD = "_last_ts";
public final static String MONGODB_LOCAL_DATABASE = "local";
Expand Down Expand Up @@ -130,21 +131,21 @@ public MongoDBRiver(RiverName riverName, RiverSettings settings, @RiverIndexName
BlockingQueue<QueueEntry> stream = definition.getThrottleSize() == -1 ? new LinkedTransferQueue<QueueEntry>()
: new ArrayBlockingQueue<QueueEntry>(definition.getThrottleSize());

this.context = new SharedContext(stream, false);
this.context = new SharedContext(stream, Status.STOPPED);

this.statusThread = EsExecutors.daemonThreadFactory(settings.globalSettings(), "mongodb_river_status").newThread(
new Status(this, definition, context));
new StatusChecker(this, definition, context));
this.statusThread.start();
}

@Override
public void start() {
if (!MongoDBRiverHelper.isRiverEnabled(client, riverName.getName())) {
if (MongoDBRiverHelper.getRiverStatus(client, riverName.getName()) == Status.STOPPED) {
logger.debug("Cannot start river {}. It is currently disabled", riverName.getName());
startInvoked = true;
return;
}
this.context.setActive(true);
this.context.setStatus(Status.RUNNING);
for (ServerAddress server : definition.getMongoServers()) {
logger.info("Using mongodb server(s): host [{}], port [{}]", server.getHost(), server.getPort());
}
Expand Down Expand Up @@ -330,7 +331,7 @@ public void close() {
} catch (Throwable t) {
logger.error("Fail to close river {}", t, riverName.getName());
} finally {
this.context.setActive(false);
this.context.setStatus(Status.STOPPED);
}
}

Expand All @@ -351,8 +352,8 @@ private XContentBuilder getGridFSMapping() throws IOException {
public static BSONTimestamp getLastTimestamp(Client client, MongoDBRiverDefinition definition) {

GetResponse lastTimestampResponse = client
.prepareGet(definition.getRiverIndexName(), definition.getRiverName(), definition.getMongoOplogNamespace()).execute()
.actionGet();
.prepareGet(definition.getRiverIndexName(), definition.getRiverName(), definition.getMongoOplogNamespace())
.execute().actionGet();

// API changes since 0.90.0 lastTimestampResponse.exists() replaced by
// lastTimestampResponse.isExists()
Expand Down Expand Up @@ -396,6 +397,14 @@ static void updateLastTimestamp(final MongoDBRiverDefinition definition, final B
}
}

/**
 * Returns the total number of documents currently stored in the river's
 * target index, as reported by an Elasticsearch count request.
 *
 * @param client the Elasticsearch client used to issue the count request
 * @param definition river definition supplying the target index name
 * @return the document count of {@code definition.getIndexName()}
 */
public static long getIndexCount(Client client, MongoDBRiverDefinition definition) {
    CountResponse response = client.prepareCount(definition.getIndexName()).execute().actionGet();
    return response.getCount();
}


protected static class QueueEntry {

private final DBObject data;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.river.RiverName;
import org.elasticsearch.river.RiverSettings;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
Expand Down Expand Up @@ -319,15 +318,15 @@ public MongoDBRiverDefinition build() {
}

@SuppressWarnings("unchecked")
public synchronized static MongoDBRiverDefinition parseSettings(RiverName riverName, String riverIndexName, RiverSettings settings,
public synchronized static MongoDBRiverDefinition parseSettings(String riverName, String riverIndexName, RiverSettings settings,
ScriptService scriptService) {

Preconditions.checkNotNull(riverName, "No riverName specified");
Preconditions.checkNotNull(riverIndexName, "Not riverIndexName specified");
Preconditions.checkNotNull(settings, "No settings specified");

Builder builder = new Builder();
builder.riverName(riverName.name());
builder.riverName(riverName);
builder.riverIndexName(riverIndexName);

List<ServerAddress> mongoServers = new ArrayList<ServerAddress>();
Expand Down Expand Up @@ -513,8 +512,8 @@ public synchronized static MongoDBRiverDefinition parseSettings(RiverName riverN
// mongoDbPassword = mdp;
}

builder.mongoDb(XContentMapValues.nodeStringValue(mongoSettings.get(DB_FIELD), riverName.name()));
builder.mongoCollection(XContentMapValues.nodeStringValue(mongoSettings.get(COLLECTION_FIELD), riverName.name()));
builder.mongoDb(XContentMapValues.nodeStringValue(mongoSettings.get(DB_FIELD), riverName));
builder.mongoCollection(XContentMapValues.nodeStringValue(mongoSettings.get(COLLECTION_FIELD), riverName));
builder.mongoGridFS(XContentMapValues.nodeBooleanValue(mongoSettings.get(GRIDFS_FIELD), false));
if (mongoSettings.containsKey(FILTER_FIELD)) {
builder.mongoFilter(XContentMapValues.nodeStringValue(mongoSettings.get(FILTER_FIELD), ""));
Expand All @@ -541,8 +540,8 @@ public synchronized static MongoDBRiverDefinition parseSettings(RiverName riverN
} catch (UnknownHostException e) {
e.printStackTrace();
}
builder.mongoDb(riverName.name());
builder.mongoCollection(riverName.name());
builder.mongoDb(riverName);
builder.mongoCollection(riverName);
}

if (settings.settings().containsKey(INDEX_OBJECT)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ protected void configure() {
}

@Provides
protected MongoDBRiverDefinition provideDefinition(RiverName riverName, RiverSettings settings, @RiverIndexName String riverIndexName,
ScriptService scriptService) {
return MongoDBRiverDefinition.parseSettings(riverName, riverIndexName, settings, scriptService);
protected MongoDBRiverDefinition provideDefinition(
RiverName riverName, RiverSettings settings, @RiverIndexName String riverIndexName, ScriptService scriptService) {
return MongoDBRiverDefinition.parseSettings(riverName.name(), riverIndexName, settings, scriptService);
}

}
14 changes: 7 additions & 7 deletions src/main/java/org/elasticsearch/river/mongodb/SharedContext.java
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,11 @@
public class SharedContext {

private BlockingQueue<QueueEntry> stream;
private boolean active;
private Status status;

public SharedContext(BlockingQueue<QueueEntry> stream, boolean active) {
public SharedContext(BlockingQueue<QueueEntry> stream, Status status) {
this.stream = stream;
this.active = active;
this.status = status;
}

public BlockingQueue<QueueEntry> getStream() {
Expand All @@ -25,12 +25,12 @@ public void setStream(BlockingQueue<QueueEntry> stream) {
this.stream = stream;
}

public boolean isActive() {
return active;
public Status getStatus() {
return status;
}

public void setActive(boolean active) {
this.active = active;
public void setStatus(Status status) {
this.status = status;
}

}
17 changes: 13 additions & 4 deletions src/main/java/org/elasticsearch/river/mongodb/Slurper.java
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.river.mongodb.util.MongoDBHelper;
import org.elasticsearch.river.mongodb.util.MongoDBRiverHelper;

import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
Expand Down Expand Up @@ -64,15 +65,19 @@ public Slurper(List<ServerAddress> mongoServers, MongoDBRiverDefinition definiti

@Override
public void run() {
while (context.isActive()) {
while (context.getStatus() == Status.RUNNING) {
try {
if (!assignCollections()) {
break; // failed to assign oplogCollection or
// slurpedCollection
}

BSONTimestamp startTimestamp = null;
if (!riverHasIndexedSomething()) {
if (!riverHasIndexedFromOplog()) {
if (!isIndexEmpty()) {
MongoDBRiverHelper.setRiverStatus(client, definition.getRiverName(), Status.INITIAL_IMPORT_FAILED);
break;
}
startTimestamp = doInitialImport();
}

Expand Down Expand Up @@ -113,15 +118,19 @@ public void run() {
}
}

protected boolean riverHasIndexedSomething() {
protected boolean riverHasIndexedFromOplog() {
return MongoDBRiver.getLastTimestamp(client, definition) != null;
}

/**
 * Reports whether the river's target index holds no documents yet.
 * Used to decide if an initial import from MongoDB is safe to run.
 *
 * @return {@code true} when the index document count is zero
 */
protected boolean isIndexEmpty() {
    long documentCount = MongoDBRiver.getIndexCount(client, definition);
    return documentCount == 0;
}

/**
* Does an initial sync the same way MongoDB does.
* https://groups.google.com/
* forum/?fromgroups=#!topic/mongodb-user/sOKlhD_E2ns
*
*
* @return the last oplog timestamp before the import began
* @throws InterruptedException
* if the blocking queue stream is interrupted while waiting
Expand Down
46 changes: 6 additions & 40 deletions src/main/java/org/elasticsearch/river/mongodb/Status.java
Original file line number Diff line number Diff line change
@@ -1,43 +1,9 @@
package org.elasticsearch.river.mongodb;

import org.elasticsearch.river.mongodb.util.MongoDBRiverHelper;
public enum Status {

class Status implements Runnable {

private final MongoDBRiver mongoDBRiver;
private final MongoDBRiverDefinition definition;
private final SharedContext context;

public Status(MongoDBRiver mongoDBRiver, MongoDBRiverDefinition definition, SharedContext context) {
this.mongoDBRiver = mongoDBRiver;
this.definition = definition;
this.context = context;
}

@Override
public void run() {
while (true) {
try {
if (this.mongoDBRiver.startInvoked) {
boolean enabled = MongoDBRiverHelper.isRiverEnabled(this.mongoDBRiver.client, this.definition.getRiverName());

if (this.context.isActive() && !enabled) {
MongoDBRiver.logger.info("About to stop river: {}", this.definition.getRiverName());
this.mongoDBRiver.close();
}

if (!this.context.isActive() && enabled) {
MongoDBRiver.logger.trace("About to start river: {}", this.definition.getRiverName());
this.mongoDBRiver.start();
}
}
Thread.sleep(1000L);
} catch (InterruptedException e) {
MongoDBRiver.logger.info("Status thread interrupted", e, (Object) null);
Thread.currentThread().interrupt();
break;
}

}
}
}
RUNNING,
STOPPED,
INITIAL_IMPORT_FAILED;

}
Loading

0 comments on commit f6252e7

Please sign in to comment.