I've written a basic REST server using Jersey 2 on top of Jetty to test out HTTP chunked Transfer-Encoding and gzip Content-Encoding. However, I've found that the recommended approach of implementing a WriterInterceptor that wraps the output stream in a GZIPOutputStream results in the server blocking instead of sending each gzip'd chunk through. I believe the GZIPOutputStream is waiting for its own buffer to fill up, so I tried overriding the write() methods in the WriterInterceptor to force a flush() after every write (my server always writes one chunk at a time), but that made no difference. Is there a way of forcing the flush to occur whenever a write occurs?
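To illustrate what I believe is happening, here is a minimal standalone sketch (outside Jersey/Jetty entirely; the class name is just for illustration) showing that a plain GZIPOutputStream keeps written data in its deflater until the stream is closed, even if flush() is called after every write:

import java.io.ByteArrayOutputStream;
import java.util.zip.GZIPOutputStream;

public class GzipBufferDemo {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        GZIPOutputStream gzip = new GZIPOutputStream(sink);

        gzip.write("{\"chunk\": 1}\r\n".getBytes("UTF-8"));
        gzip.flush();
        // Only the 10-byte gzip header (written by the constructor) has
        // reached the sink; the data itself is still in the deflater.
        System.out.println("after write + flush: " + sink.size() + " bytes");

        gzip.close();
        System.out.println("after close:         " + sink.size() + " bytes");
    }
}

That behaviour seems consistent with the blocking I see from the interceptor below.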
App.java
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.glassfish.jersey.message.GZipEncoder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.server.filter.EncodingFilter;
import org.glassfish.jersey.servlet.ServletContainer;

public class App {
    public static int lineCount = 0;

    public static void main(String[] args) {
        System.out.println("Hello World!");

        ResourceConfig config = new ResourceConfig();
        config.packages("com.example.mockAPIjava");
        ServletHolder servlet = new ServletHolder(new ServletContainer(config));
        EncodingFilter.enableFor(config, GZipEncoder.class);

        Server server = new Server(2222);
        ServletContextHandler context = new ServletContextHandler(server, "/*");
        context.addServlet(servlet, "/*");

        try {
            server.start();
            server.join();
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            server.destroy();
        }
    }
}
GZIPWriterInterceptor.java
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.Provider;
import javax.ws.rs.ext.WriterInterceptor;
import javax.ws.rs.ext.WriterInterceptorContext;

@Provider
@Compress
public class GZIPWriterInterceptor implements WriterInterceptor {

    @Override
    public void aroundWriteTo(WriterInterceptorContext context)
            throws IOException, WebApplicationException {
        MultivaluedMap<String, Object> headers = context.getHeaders();
        headers.add("Content-Encoding", "gzip");

        final OutputStream outputStream = context.getOutputStream();
        context.setOutputStream(new GZIPOutputStream(outputStream) {
            // Delegate to the GZIP stream, then flush after every write,
            // since the resource writes exactly one chunk at a time.
            @Override
            public void write(final int b) throws IOException {
                super.write(b);
                flush();
            }

            @Override
            public void write(final byte[] b) throws IOException {
                super.write(b);
                flush();
            }

            @Override
            public void write(final byte[] b, final int off, final int len) throws IOException {
                super.write(b, off, len);
                flush();
            }
        });
        context.proceed();
    }
}
Resource.java
@Path("stream")
public class Resource {
@GET
@Path("test")
@Compress
@Produces(MediaType.APPLICATION_JSON)
public ChunkedOutput<String> helloWorld(@Context HttpHeaders header, @Context HttpServletResponse response) {
final ChunkedOutput<String> output = new ChunkedOutput<String>(String.class, "\r\n");
new Thread() {
public void run() {
BufferedReader br = null;
try {
String chunk;
// open file for reading
File file = new File("/tmp/stream.txt");
FileReader fr = new FileReader(file);
br = new BufferedReader(fr);
while ((chunk = getNextString(br)) != null) {
// write a chunk every second
output.write(chunk);
try {
Thread.sleep(1 * 1000);
} catch(InterruptedException ex) {
Thread.currentThread().interrupt();
}
}
} catch (Exception e) {
// IOException thrown when writing the
// chunks of response: should be handled
e.printStackTrace();
} finally {
try {
output.close();
// simplified: IOException thrown from
// this close() should be handled here...
if (br!=null) { br.close(); }
} catch (IOException e1){
e1.printStackTrace();
}
}
}
}.start();
// the output will be probably returned even before
// a first chunk is written by the new thread
return output;
}
private String getNextString(BufferedReader br) throws IOException, ParseException {
App.lineCount++;
return br.readLine();;
}
}
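For reference, a bare-bones client along these lines (hypothetical class name, plain JDK only, no transparent gzip handling) is enough to observe the behaviour; if the per-write flush worked I would expect a small burst of bytes roughly every second, but with the interceptor enabled the chunks never come through:

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class ChunkWatcher {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:2222/stream/test");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept-Encoding", "gzip");

        // Print how many raw (still gzip'd) bytes arrive, and when.
        InputStream in = conn.getInputStream();
        byte[] buf = new byte[8192];
        int n;
        while ((n = in.read(buf)) != -1) {
            System.out.println(System.currentTimeMillis() + ": " + n + " bytes");
        }
        in.close();
    }
}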
Compress.java
// @Compress is the name-binding annotation that ties resource methods to the GZIPWriterInterceptor
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

import javax.ws.rs.NameBinding;

@NameBinding
@Retention(RetentionPolicy.RUNTIME)
public @interface Compress {}