36 changes: 12 additions & 24 deletions src/main/java/io/vertx/core/eventbus/impl/MessageConsumerImpl.java
@@ -133,36 +133,24 @@ public synchronized Future<Void> unregister() {
   }
 
   protected boolean doReceive(Message<T> message) {
-    Handler<Message<T>> theHandler;
     synchronized (this) {
       if (handler == null) {
         return false;
       }
-      if (demand == 0L) {
-        if (pending.size() < maxBufferedMessages) {
-          pending.add(message);
-          return true;
-        } else {
-          discard(message);
-          if (discardHandler != null) {
-            discardHandler.handle(message);
-          } else {
-            log.warn("Discarding message as more than " + maxBufferedMessages + " buffered in paused consumer. address: " + address);
-          }
-        }
+      if (pending.size() < maxBufferedMessages) {
+        pending.add(message);
+        checkNextTick();
Author:
This method is simplified a lot: we now always just put the message into the pending queue (if the buffer is not full yet, of course) and send a tick that decides whether anything needs to happen right now. Since the tick evaluation happens on the context thread, we are safe (single-threaded).
         return true;
       } else {
-        if (pending.size() > 0) {
-          pending.add(message);
-          message = pending.poll();
-        }
-        if (demand != Long.MAX_VALUE) {
-          demand--;
+        discard(message);
+        if (discardHandler != null) {
+          discardHandler.handle(message);
+        } else {
+          String pause = demand == 0 ? "paused" : "NOT paused";
+          log.warn("Discarding message as more than " + maxBufferedMessages + " buffered in " + pause + " consumer. address: " + address);
         }
-        theHandler = handler;
       }
     }
-    deliver(theHandler, message);
     return true;
   }
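To make the comment above concrete, here is a self-contained sketch of the buffer-and-tick pattern it describes. This is illustrative code, not Vert.x internals: the class name `TickedConsumer`, the `receive` method, and the single-threaded `Executor` standing in for the Vert.x context are all assumptions.

```java
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.function.Consumer;

// Illustrative only: every message is buffered first, and a "tick" scheduled
// on a single-threaded executor (standing in for the Vert.x context) drains
// one message per unit of demand, so handlers never run concurrently.
class TickedConsumer<T> {

  private final Queue<T> pending = new ArrayDeque<>();
  private final Executor context = Executors.newSingleThreadExecutor();
  private long demand = Long.MAX_VALUE;
  private Consumer<T> handler;

  synchronized void handler(Consumer<T> handler) {
    this.handler = handler;
  }

  synchronized void receive(T msg) {
    pending.add(msg);  // always buffer first, even when there is demand
    checkNextTick();   // the tick decides whether to deliver right now
  }

  synchronized void pause() {
    demand = 0L;
  }

  synchronized void resume() {
    demand = Long.MAX_VALUE;
    checkNextTick();
  }

  private synchronized void checkNextTick() {
    if (demand > 0L && !pending.isEmpty()) {  // demand evaluated first, as in the PR
      context.execute(() -> {
        T msg;
        Consumer<T> h;
        synchronized (this) {
          if (demand == 0L || (msg = pending.poll()) == null) {
            return;    // paused again in the meantime, or nothing buffered
          }
          if (demand != Long.MAX_VALUE) {
            demand--;  // consume one unit of demand per delivery
          }
          h = handler; // assumes a handler was registered
        }
        h.accept(msg); // deliver outside the lock
        checkNextTick(); // keep draining while demand and messages remain
      });
    }
  }
}
```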

@@ -171,7 +159,7 @@ protected void dispatch(Message<T> msg, ContextInternal context, Handler<Message<T>> handler) {
     if (handler == null) {
       throw new NullPointerException();
     }
-    context.emit(msg, handler);
+    context.dispatch(msg, handler);
Author:
This was reverted because it had a side effect: after resume() we received many messages even though we paused again while processing the first one.
   }
 
   private void deliver(Handler<Message<T>> theHandler, Message<T> message) {
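The observable contract at stake, as a minimal user-level sketch (assumes an existing `MessageConsumer<String> consumer`; `doAsyncWork` is a hypothetical method returning a `Future`): a pause() issued inside the handler must take effect before the next buffered message is dispatched.

```java
// Minimal sketch, user-level API only. `doAsyncWork` is hypothetical.
consumer.handler(msg -> {
  consumer.pause();                  // must stop further deliveries immediately
  doAsyncWork(msg).onComplete(ar -> {
    consumer.resume();               // at most one message in flight at a time
  });
});
```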
@@ -183,8 +171,8 @@ private void deliver(Handler<Message<T>> theHandler, Message<T> message) {
 
   private synchronized void checkNextTick() {
     // Check if there are more pending messages in the queue that can be processed next time around
-    if (!pending.isEmpty() && demand > 0L) {
-      context.nettyEventLoop().execute(() -> {
+    if (demand > 0L && !pending.isEmpty()) {
Author:
I flipped the condition here so that demand is evaluated first.

+      context.emit(__ -> {
         Message<T> message;
         Handler<Message<T>> theHandler;
         synchronized (MessageConsumerImpl.this) {
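For reference, the knobs involved, as a hedged user-level sketch (the address, buffer size, and handler body are illustrative; the API calls are standard Vert.x `MessageConsumer` methods): pausing drops demand to zero so messages accumulate in the pending buffer, up to maxBufferedMessages, beyond which they are discarded.

```java
// Hedged user-level sketch of the buffering behavior implemented above.
MessageConsumer<String> consumer = vertx.eventBus().consumer("some.address");
consumer.setMaxBufferedMessages(100);  // beyond this, doReceive() discards
consumer.handler(msg -> System.out.println(msg.body()));
consumer.pause();                      // demand = 0: messages pile up in the buffer
// ... later ...
consumer.resume();                     // checkNextTick() drains the buffer on the context thread
```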
@@ -1401,7 +1401,6 @@ public void testUnregisterConsumerDiscardPendingMessages() {
       eb.send(ADDRESS1, "val1");
       Context ctx = Vertx.currentContext();
       ctx.runOnContext(v -> {
-        consumer.resume();
Author:
In the past this was executed on the event-loop thread of the context; now it is executed on the context thread (if we are already on it, no new job is scheduled; if we are on another thread, an async job is scheduled). In short: resume() now runs right here, so the logic below was no longer correct, because this call synchronously wipes out the pending messages.
         ((MessageConsumerImpl<?>) consumer).discardHandler(discarded -> {
           assertEquals("val1", discarded.body());
           testComplete();
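A hedged illustration of the timing change described above (test names reused; the truncated test body is assumed to unregister the consumer and expect the buffered "val1" to be discarded): calling resume() from the context thread now drains the pending buffer synchronously, so it would hand "val1" to the message handler before the discard handler could ever see it.

```java
// Hedged sketch; not the PR's code. The resume() call had to go because it
// now synchronously flushes the pending buffer when run on the context thread.
ctx.runOnContext(v -> {
  // consumer.resume();   // would deliver "val1" right here, synchronously,
  //                      // leaving nothing for unregister() to discard
  ((MessageConsumerImpl<?>) consumer).discardHandler(discarded -> {
    assertEquals("val1", discarded.body());
    testComplete();
  });
});
```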
53 changes: 42 additions & 11 deletions src/test/java/io/vertx/core/eventbus/MessageConsumerTest.java
@@ -7,33 +7,41 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;

 public class MessageConsumerTest extends VertxTestBase {
 
   @Test
-  public void testMessageConsumptionStayOnWorkerThreadAfterResume() throws Exception {
-    TestVerticle verticle = new TestVerticle(2);
+  public void testMessageConsumptionStayOnWorkerThreadAfterResumeAndOnlyDispatchOneMessageAtOneMoment() throws Exception {
+    int numberOfExpectedMessages = 10;
+    TestVerticle verticle = new TestVerticle(numberOfExpectedMessages);
+    EchoVerticle echoVerticle = new EchoVerticle();
     Future<String> deployVerticle = vertx.deployVerticle(verticle, new DeploymentOptions().setThreadingModel(ThreadingModel.WORKER));
+    Future<String> deployEchoVerticle = vertx.deployVerticle(echoVerticle, new DeploymentOptions().setThreadingModel(ThreadingModel.WORKER));
 
     CountDownLatch startLatch = new CountDownLatch(1);
-    deployVerticle.onComplete(onSuccess(cf -> startLatch.countDown()));
+    Future.all(deployVerticle, deployEchoVerticle)
+      .onComplete(onSuccess(cf -> startLatch.countDown()));
     awaitLatch(startLatch);
 
-    vertx.eventBus().send("testAddress", "message1");
-    vertx.eventBus().send("testAddress", "message2");
+    for (int i = 1; i <= numberOfExpectedMessages; i++) {
+      vertx.eventBus().send("testAddress", "message" + i);
+    }
 
     awaitLatch(verticle.msgLatch);
 
-    assertEquals(2, verticle.messageArrivedOnWorkerThread.size());
-    assertTrue("message1 should be processed on worker thread", verticle.messageArrivedOnWorkerThread.get("message1"));
-    assertTrue("message2 should be processed on worker thread", verticle.messageArrivedOnWorkerThread.get("message2"));
+    assertEquals(numberOfExpectedMessages, verticle.messageArrivedOnWorkerThread.size());
+    for (int i = 1; i <= numberOfExpectedMessages; i++) {
+      assertTrue("message" + i + " should be processed on worker thread", verticle.messageArrivedOnWorkerThread.get("message" + i));
+    }
   }
 
   private static class TestVerticle extends AbstractVerticle {
 
     private final CountDownLatch msgLatch;
+    private final AtomicBoolean messageProcessingOngoing = new AtomicBoolean();
 
     private final Map<String, Boolean> messageArrivedOnWorkerThread = new HashMap<>();
 
@@ -51,11 +59,34 @@ private void handleMessages(MessageConsumer<String> consumer) {
       consumer.handler(msg -> {
         consumer.pause();
         messageArrivedOnWorkerThread.putIfAbsent(msg.body(), Context.isOnWorkerThread());
-        msgLatch.countDown();
-        vertx.setTimer(20, id -> {
-          consumer.resume();
-        });
+        if (messageProcessingOngoing.compareAndSet(false, true)) {
+          msgLatch.countDown();
+        } else {
+          System.err.println("Received message while processing another message");
+        }
+        vertx.eventBus().request("echoAddress", 20)
+          .onComplete(ar -> {
+            messageProcessingOngoing.set(false);
+            consumer.resume();
+          });
       });
     }
   }
 
+  private static class EchoVerticle extends AbstractVerticle {
+    @Override
+    public void start() {
+      MessageConsumer<Integer> consumer = vertx.eventBus().localConsumer("echoAddress");
+      handleMessages(consumer);
+    }
+
+    private void handleMessages(MessageConsumer<Integer> consumer) {
+      consumer.handler(msg -> {
+        vertx.setTimer(msg.body(), id -> {
+          msg.reply(msg.body());
+        });
+      });
+    }
+  }
+
 }