Fix hideous timing bug

commit f7ac79d872 (parent ab61d2dbfd)
Author: Kai S. K. Engelbart
Date:   2020-07-09 15:18:06 +02:00
2 changed files with 16 additions and 32 deletions
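
What changes, at a glance: the cleanup of the handshake processors and the relaying of cached messages move out of performHandshake() and loadChatScene() and into initReceiver(), and LoginScene now calls initReceiver() only after the chat scene has been loaded. The apparent effect is that neither cached nor live messages can be processed before their processors and the chat UI exist. The sketch below restates the resulting login flow; the method and variable names are taken from the diff, while the surrounding structure is simplified for illustration and not the literal Envoy sources.

    // Post-fix login flow as implied by the diff (simplified sketch):
    client.performHandshake(credentials, cacheMap);
    if (client.isOnline()) {
        loadChatScene();                         // 1. build the UI first
        client.initReceiver(localDB, cacheMap);  // 2. swap handshake processors for the real ones,
                                                 //    then relay the caches as the final step
    }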

Client.java

@@ -7,9 +7,7 @@ import java.util.concurrent.TimeoutException;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import envoy.client.data.CacheMap;
-import envoy.client.data.ClientConfig;
-import envoy.client.data.LocalDB;
+import envoy.client.data.*;
 import envoy.client.event.SendEvent;
 import envoy.data.*;
 import envoy.event.*;
@@ -60,9 +58,7 @@ public class Client implements Closeable {
 	 * @throws InterruptedException if the current thread is interrupted while
 	 *                              waiting for the handshake response
 	 */
-	public void performHandshake(LoginCredentials credentials,
-			CacheMap cacheMap)
-			throws TimeoutException, IOException, InterruptedException {
+	public void performHandshake(LoginCredentials credentials, CacheMap cacheMap) throws TimeoutException, IOException, InterruptedException {
 		if (online) throw new IllegalStateException("Handshake has already been performed successfully");
 
 		// Establish TCP connection
@@ -104,9 +100,6 @@ public class Client implements Closeable {
 		online = true;
-
-		// Remove all processors as they are only used during the handshake
-		receiver.removeAllProcessors();
 		logger.log(Level.INFO, "Handshake completed.");
 	}
@@ -124,22 +117,23 @@ public class Client implements Closeable {
 	public void initReceiver(LocalDB localDB, CacheMap cacheMap) throws IOException {
 		checkOnline();
 
+		// Remove all processors as they are only used during the handshake
+		receiver.removeAllProcessors();
 		// Process incoming messages
-		final ReceivedMessageProcessor receivedMessageProcessor = new ReceivedMessageProcessor();
-		final ReceivedGroupMessageProcessor receivedGroupMessageProcessor = new ReceivedGroupMessageProcessor();
-		final MessageStatusChangeProcessor messageStatusChangeProcessor = new MessageStatusChangeProcessor();
-		final GroupMessageStatusChangeProcessor groupMessageStatusChangeProcessor = new GroupMessageStatusChangeProcessor();
+		final var receivedMessageProcessor = new ReceivedMessageProcessor();
+		final var receivedGroupMessageProcessor = new ReceivedGroupMessageProcessor();
+		final var messageStatusChangeProcessor = new MessageStatusChangeProcessor();
+		final var groupMessageStatusChangeProcessor = new GroupMessageStatusChangeProcessor();
 		receiver.registerProcessor(GroupMessage.class, receivedGroupMessageProcessor);
 		receiver.registerProcessor(Message.class, receivedMessageProcessor);
 		receiver.registerProcessor(MessageStatusChange.class, messageStatusChangeProcessor);
 		receiver.registerProcessor(GroupMessageStatusChange.class, groupMessageStatusChangeProcessor);
 
-		// Relay cached unread messages and unread groupMessages
+		// Relay caches
 		cacheMap.get(Message.class).setProcessor(receivedMessageProcessor);
 		cacheMap.get(GroupMessage.class).setProcessor(receivedGroupMessageProcessor);
-		// Relay cached status changes
 		cacheMap.get(MessageStatusChange.class).setProcessor(messageStatusChangeProcessor);
 		cacheMap.get(GroupMessageStatusChange.class).setProcessor(groupMessageStatusChangeProcessor);
@@ -172,18 +166,10 @@ public class Client implements Closeable {
 		// Request a generator if none is present or the existing one is consumed
 		if (!localDB.hasIDGenerator() || !localDB.getIDGenerator().hasNext()) requestIdGenerator();
-	}
 
-	/**
-	 * Creates a new write proxy that uses this client to communicate with the
-	 * server.
-	 *
-	 * @param localDB the local database that the write proxy will use to access
-	 *                caches
-	 * @return a new write proxy
-	 * @since Envoy Client v0.3-alpha
-	 */
-	public WriteProxy createWriteProxy(LocalDB localDB) { return new WriteProxy(this, localDB); }
+		// Relay caches
+		cacheMap.getMap().values().forEach(Cache::relay);
+	}
 
 	/**
 	 * Sends a message to the server. The message's status will be incremented once
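
The relay call that now closes initReceiver() only makes sense if a Cache buffers incoming objects until a processor is registered and flushes them on relay(). The diff confirms setProcessor() and relay() on the cache type but not its internals, so the following is a minimal sketch under that assumption rather than the actual envoy.client.data.Cache class.

    import java.util.ArrayDeque;
    import java.util.Queue;
    import java.util.function.Consumer;

    // Hypothetical stand-in for envoy.client.data.Cache: accepts elements at any time,
    // delivers them only once relay() is invoked after a processor has been set.
    public class Cache<T> implements Consumer<T> {

        private final Queue<T> elements = new ArrayDeque<>();
        private Consumer<T> processor;

        @Override
        public void accept(T element) { elements.add(element); }

        public void setProcessor(Consumer<T> processor) { this.processor = processor; }

        // Flushes all buffered elements to the processor; call only after setProcessor(...)
        public void relay() {
            while (!elements.isEmpty()) processor.accept(elements.poll());
        }
    }

With the relay bundled into initReceiver(), registering the processors and flushing the caches happen in one place and in a fixed order.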

LoginScene.java

@@ -13,6 +13,7 @@ import javafx.scene.control.Alert.AlertType;
 import envoy.client.data.*;
 import envoy.client.net.Client;
+import envoy.client.net.WriteProxy;
 import envoy.client.ui.ClearableTextField;
 import envoy.client.ui.SceneContext;
 import envoy.client.ui.Startup;
@@ -134,8 +135,8 @@ public final class LoginScene {
 		try {
 			client.performHandshake(credentials, cacheMap);
 			if (client.isOnline()) {
-				client.initReceiver(localDB, cacheMap);
 				loadChatScene();
+				client.initReceiver(localDB, cacheMap);
 			}
 		} catch (IOException | InterruptedException | TimeoutException e) {
 			logger.log(Level.INFO, "Could not connect to server. Entering offline mode...");
@@ -175,7 +176,7 @@ public final class LoginScene {
 		}
 		// Initialize write proxy
-		final var writeProxy = client.createWriteProxy(localDB);
+		final var writeProxy = new WriteProxy(client, localDB);
 		localDB.synchronize();
@@ -195,8 +196,5 @@ public final class LoginScene {
 		sceneContext.getStage().setMinWidth(350);
 		sceneContext.load(SceneContext.SceneInfo.CHAT_SCENE);
 		sceneContext.<ChatScene>getController().initializeData(sceneContext, localDB, client, writeProxy);
-
-		// Relay the caches if online
-		if (client.isOnline()) cacheMap.getMap().values().forEach(Cache::relay);
 	}
 }
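
A side effect of the cleanup is that Client no longer needs a WriteProxy factory: createWriteProxy() is deleted and LoginScene constructs the proxy directly, with the constructor signature visible in the removed factory. Functionally the two forms are equivalent; only the construction site moves.

    // Before: proxy obtained through a factory method on Client
    final var writeProxy = client.createWriteProxy(localDB);

    // After: constructed directly in LoginScene, dropping the extra indirection
    final var writeProxy = new WriteProxy(client, localDB);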