I have a question about OpenHFT Chronicle. How can I keep the client–server connection (or the client's tid on the server, so I can reach that client again later) after the server has sent its response, and then push the client an additional response with extra data — for example, when some data is updated on the server? Thanks in advance.
Example code:
import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.core.threads.EventLoop;
import net.openhft.chronicle.core.threads.HandlerPriority;
import net.openhft.chronicle.network.AcceptorEventHandler;
import net.openhft.chronicle.network.NetworkContext;
import net.openhft.chronicle.network.TCPRegistry;
import net.openhft.chronicle.network.VanillaNetworkContext;
import net.openhft.chronicle.network.connection.FatalFailureConnectionStrategy;
import net.openhft.chronicle.network.connection.TcpChannelHub;
import net.openhft.chronicle.network.connection.TryLock;
import net.openhft.chronicle.network.tcp.ChronicleServerSocketChannel;
import net.openhft.chronicle.threads.EventGroup;
import net.openhft.chronicle.wire.TextWire;
import net.openhft.chronicle.wire.Wire;
import net.openhft.chronicle.wire.WireType;
import net.openhft.chronicle.wire.YamlLogging;
import nftClientServer.network.LegacyHanderFactory;
import nftClientServer.handlers.ActorRequestHandler;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;
import static net.openhft.chronicle.network.connection.SocketAddressSupplier.uri;
import static net.openhft.chronicle.network.connection.TcpChannelHub.TCP_USE_PADDING;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Demo: a Chronicle Network TCP client/server round trip in which the server
 * sends MORE THAN ONE response document for a single client request.
 *
 * <p>The client writes a request tagged with a unique {@code tid}; the server
 * ({@code ActorRequestHandler}) reflects the tid back in a meta-data document
 * and then writes two data documents. The client calls {@code proxyReply}
 * twice with the SAME tid to pick up both responses — keeping the tid and
 * prefixing every outgoing document with it is the mechanism for "send the
 * client additional data later".
 */
public class Main<T extends NetworkContext<T>> {

    public static void main(String[] args) throws IOException {
        YamlLogging.setAll(true); // dump all wire traffic as YAML for debugging

        // Symbolic name registered with the TCPRegistry; resolved to a real
        // host:port by uri(...) in createClient below.
        final String port1 = "8080";
        TCPRegistry.createServerSocketChannelFor(port1);

        // Off-heap buffer backing the request wire; MUST be released (below).
        final Bytes<ByteBuffer> bytes = Bytes.elasticByteBuffer();

        // One event loop serves both the server acceptor and the client hub.
        try (EventLoop eg = EventGroup.builder().build()) {
            eg.start();
            createSimpleTcpServer(port1, eg);

            List<TcpChannelHub> clients = new ArrayList<>();
            Map<TcpChannelHub, Long> tids = new HashMap<>();
            try {
                // The request message is a text wire backed by the elastic buffer.
                final Wire wire = new TextWire(bytes).useTextDocuments();

                TcpChannelHub clientCreate = createClient(eg, port1);
                clients.add(clientCreate);

                // The tid must be unique; it is reflected back by the server at
                // the start of each message so the client can route the reply
                // to the thread waiting on that tid.
                final long tid = clientCreate.nextUniqueTransaction(System.currentTimeMillis());
                tids.put(clientCreate, tid);
                System.out.println(" tid is " + tid);

                String actorName = "Bruce";
                configureWire(wire, tid, actorName, clientCreate);

                // Write the request under the hub's output lock.
                clientCreate.lock2(() -> clientCreate.writeSocket(wire, true, false),
                        true, TryLock.TRY_LOCK_WARN);

                clients.forEach(clientReadDataFromServer -> {
                    try {
                        // The server sends TWO data documents for one request,
                        // so read two replies with the same tid (5 s timeout
                        // each) — this is the "additional response" arriving.
                        for (int i = 0; i < 2; i++) {
                            System.out.println("client " + tids.get(clientReadDataFromServer) + " before client answer");
                            Wire reply = clientReadDataFromServer.proxyReply(5000, tids.get(clientReadDataFromServer));
                            System.out.println("client " + tids.get(clientReadDataFromServer) + " before reading answer");
                            // Read the reply document and print its payload.
                            reply.readDocument(null, data -> {
                                final String text = data.read("payloadResponse").text();
                                System.out.println(text);
                            });
                            System.out.println("client " + tids.get(clientReadDataFromServer) + " after reading answer");
                        }
                    } catch (TimeoutException e) {
                        System.out.println("client " + tids.get(clientReadDataFromServer) + " timeout exception");
                    }
                });
                eg.stop();
            } finally {
                TcpChannelHub.closeAllHubs();
                clients.forEach(TcpChannelHub::close);
                TCPRegistry.reset();
                // FIX: the elastic byte buffer is off-heap and was leaked —
                // Chronicle Bytes must be released explicitly.
                bytes.releaseLast();
            }
        }
        // NOTE(review): the original wrapped everything in `for (;;) { ... break; }`,
        // a dead loop that always ran exactly once; it has been removed.
    }

    /** Creates a TEXT-wire client hub connected to the registry entry {@code desc}. */
    private static TcpChannelHub createClient(EventLoop eg, String desc) {
        return new TcpChannelHub(null,
                eg, WireType.TEXT, "/", uri(desc), false,
                null, HandlerPriority.TIMER, new FatalFailureConnectionStrategy(3, false));
    }

    /**
     * Registers a non-blocking acceptor on the event loop that builds an
     * {@code ActorRequestHandler} for each inbound connection.
     */
    private static void createSimpleTcpServer(String port, EventLoop eg) throws IOException {
        AcceptorEventHandler eah = new AcceptorEventHandler(port,
                LegacyHanderFactory.simpleTcpEventHandlerFactory(ActorRequestHandler::new),
                VanillaNetworkContext::new);
        eg.addHandler(eah);
        ChronicleServerSocketChannel sc = TCPRegistry.acquireServerSocketChannel(port);
        sc.configureBlocking(false);
    }

    /**
     * Writes the request onto {@code wire}: a meta-data document carrying the
     * tid, followed by a data document carrying the payload.
     */
    private static void configureWire(Wire wire, long tid, String messageForServer, TcpChannelHub client) {
        wire.usePadding(TCP_USE_PADDING);
        // The tid travels as meta-data so the server can reflect it back.
        wire.writeDocument(true, w -> w.write("tid").int64(tid));
        wire.writeDocument(false, w -> w.write("payload").text(messageForServer));
    }
}
public enum LegacyHanderFactory {
public static <T extends NetworkContext<T>> Function<T, TcpEventHandler<T>> simpleTcpEventHandlerFactory(
final Function<T, TcpHandler<T>> defaultHandedFactory) {
return (networkContext) -> {
networkContext.wireOutPublisher(new VanillaWireOutPublisher(TEXT));
final TcpEventHandler<T> handler = new TcpEventHandler<>(networkContext);
handler.tcpHandler(new WireTypeSniffingTcpHandler<>(handler,
defaultHandedFactory));
return handler;
};
}
}
import net.openhft.chronicle.network.NetworkContext;
import net.openhft.chronicle.network.WireTcpHandler;
import net.openhft.chronicle.wire.DocumentContext;
import net.openhft.chronicle.wire.Wire;
import net.openhft.chronicle.wire.WireOut;
import java.util.HashMap;
import java.util.Map;
/**
 * Server-side handler: reads the tid (meta-data) and payload (data) sent by
 * the client, reflects the tid back, then writes TWO data documents for the
 * single request — demonstrating that the server can keep pushing responses
 * to the same client after the first reply, as long as each document is
 * routed under the client's tid.
 */
public class ActorRequestHandler extends WireTcpHandler {

    // actor first name -> surname; populated in onInitialize()
    private final Map<String, String> actors = new HashMap<>();

    public ActorRequestHandler(NetworkContext networkContext) {
        // NOTE(review): networkContext is not retained; presumably the
        // superclass wiring supplies it elsewhere — confirm against
        // WireTcpHandler's contract.
        System.out.println("ActorRequestHandler constructor");
    }

    /**
     * Reads the tid and payload, echoes the tid as meta-data, then sends two
     * payload-response documents (the second after a 4 s delay, to simulate
     * "data updated later on the server").
     *
     * @param in      the DocumentContext received from the client
     * @param outWire the wire written back to the client
     */
    @Override
    protected void onRead(DocumentContext in,
                          WireOut outWire) {
        Wire wire = in.wire();
        assert wire != null;
        if (in.isMetaData()) {
            long tid = wire.read("tid").int64();
            String payload = wire.read("payload").text();
            System.out.println("server, tid is " + tid + ". Text is " + payload);
            // Reflect the tid so the client hub can route the replies.
            outWire.writeDocument(true,
                    meta -> meta.write("tid").int64(tid));
        } else {
            long tid = wire.read("tid").int64();
            String payload = wire.read("payload").text();
            // FIX: the original log printed the payload twice
            // ("actor name is X. Text is X") — a copy-paste mistake.
            System.out.println("server, actor name is " + payload);
            for (int i = 0; i < 2; i++) {
                int numberOfAnswer = i;
                if (i == 1) {
                    // Delay the second answer so it arrives as a distinct,
                    // later response; the client's 5 s proxyReply timeout
                    // still covers the 4 s sleep.
                    System.out.println("server is sleeping");
                    try {
                        Thread.sleep(4000);
                    } catch (InterruptedException e) {
                        // FIX: restore the interrupt flag instead of
                        // silently swallowing the interruption.
                        Thread.currentThread().interrupt();
                        System.out.println("server timeout exception");
                    }
                }
                System.out.println("server, actor name is " + payload + ". Answer " + numberOfAnswer + " will be sent");
                outWire.writeDocument(false,
                        data -> data.write("payloadResponse").text(actors.get(payload) + numberOfAnswer));
                System.out.println("server, actor name is " + payload + ". Answer " + numberOfAnswer + " has been sent");
            }
        }
    }

    /** Seeds the demo lookup data before the handler starts processing. */
    @Override
    protected void onInitialize() {
        System.out.println("ActorRequestHandler onInitialize");
        actors.put("Bruce", "Willis");
        actors.put("Arnold", "Schwarznegger");
    }
}