├── README.md
├── build.gradle
└── src
    ├── main
    │   └── java
    │       └── com
    │           └── bluedevel
    │               └── concurrent
    │                   ├── AsyncOutputStreamQueue.java
    │                   ├── BufferedOutputStream.java
    │                   ├── BufferedOutputStream2.java
    │                   ├── CustomBlockingMpmcQueue.java
    │                   └── ParallelGZIPOutputStream.java
    └── test
        └── java
            └── com
                └── bluedevel
                    └── concurrent
                        ├── AsyncOutputStreamQueueTest.java
                        ├── BufferedOutputStream2Test.java
                        ├── BufferedOutputStreamTest.java
                        ├── CustomBlockingMpmcQueueTest.java
                        └── ParallelGZIPOutputStreamTest.java
/README.md:
--------------------------------------------------------------------------------
1 | # Concurrent utilities
2 | 
3 | PRs welcome. This is a collection of utilities I have written, or wished were
4 | available, when attempting to build a faster and more concurrent system.
5 | 
6 | Please read the source before applying them to your system, as they kick off
7 | new threads with waiting loops and the like. You might need to tune them for your solution.
8 | 
9 | ## AsyncOutputStreamQueue
10 | 
11 | Offloads the work of the next stage of an OutputStream onto another thread
12 | via a queue. This means your producer can keep generating data for the output
13 | stream while the next stage performs the actual output work.
14 | 
15 | Ideally this is what PipedInputStream / PipedOutputStream would do. Unfortunately
16 | PipedOutputStream has one-second sleeps and requires wait/notify or other workarounds
17 | to get reasonable performance.
18 | 
19 | ## BufferedOutputStream
20 | 
21 | A BufferedOutputStream drop-in replacement that uses atomic CAS instead of synchronized
22 | blocks to get data through. If you have many threads attempting to write to a fast
23 | OutputStream, this stream should operate a lot faster.
24 | 
25 | As an example, both of the other OutputStreams here queue their work on other threads and
26 | tend to return very quickly, so the overhead of a synchronized call can be quite large
27 | with lots of writers.
28 | 
29 | ## BufferedOutputStream2
30 | 
31 | Another attempt, using an MPSC queue and a consumer thread. In my testing this was a little
32 | slower than the CAS solution, so you should probably prefer that one. Left here for testing
33 | and/or further improvements.
34 | 
35 | ## ParallelGZIPOutputStream
36 | 
37 | A parallel gzip implementation for the JVM and a drop-in replacement for the existing
38 | GZIPOutputStream, giving a significant improvement in gzip output performance.
39 | 
40 | Inspired by Shevek's work, only without a buffer and with a little less locking:
41 | https://github.com/shevek/parallelgzip
42 | 
43 | ## CustomBlockingMpmcQueue
44 | 
45 | When Java's out-of-the-box ArrayBlockingQueue and similar become a bottleneck, you can
46 | switch to NitsanW's JCTools queues. Unfortunately they don't implement BlockingQueue
47 | out of the box, which you need for an Executor. Use this adapter as your executor's work
48 | queue and you can take advantage of the JCTools queue performance.
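## Example usage

A minimal sketch of wiring ParallelGZIPOutputStream to a thread pool, in the same way the
tests do. The file name, chunk size, and pool choice are illustrative only:

```java
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import com.bluedevel.concurrent.ParallelGZIPOutputStream;

public class Example {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newCachedThreadPool();

        OutputStream file = new FileOutputStream("data.gz");
        ParallelGZIPOutputStream gzip = new ParallelGZIPOutputStream(file, pool);

        byte[] chunk = new byte[64 * 1024];   // pass large chunks so each task compresses well
        gzip.write(chunk);
        gzip.close();                         // enqueues the gzip trailer for the writer task

        // close() returns before the writer task has flushed and closed 'file',
        // so do not close 'file' yourself; shutdown() lets the in-flight tasks drain.
        pool.shutdown();
    }
}
```

A fixed-size pool backed by CustomBlockingMpmcQueue (above) works too, as long as the pool
has a thread to spare: the stream's writer task occupies one pool thread until the stream
is closed.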
49 | 50 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | buildscript { 2 | repositories { 3 | mavenCentral() 4 | } 5 | } 6 | 7 | plugins { 8 | id 'java' 9 | } 10 | 11 | repositories { 12 | mavenCentral() 13 | } 14 | 15 | dependencies { 16 | compile group: 'org.jctools', name: 'jctools-core', version: '2.0.2' 17 | 18 | testCompile group: 'junit', name: 'junit', version: '4.11' 19 | } 20 | -------------------------------------------------------------------------------- /src/main/java/com/bluedevel/concurrent/AsyncOutputStreamQueue.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import java.io.Closeable; 4 | import java.io.IOException; 5 | import java.io.OutputStream; 6 | import java.util.ArrayList; 7 | import java.util.concurrent.ArrayBlockingQueue; 8 | 9 | /** 10 | * Allows offload of the work to the next stage of an OutputStream on another thread 11 | * via a queue. This means your producer can keep generating data to put on the output 12 | * stream while the next stage is performing the actual output work. 13 | * 14 | * Ideally this is what the PipedInputStream / PipedOutputStream should do. Unfortunately 15 | * the PipedOutputStream has 1sec sleeps and requires wait/notify or other solutions 16 | * to get a reasonable performance. 17 | */ 18 | public class AsyncOutputStreamQueue extends OutputStream { 19 | 20 | /** 21 | * Implementation notes: 22 | * 23 | * A byte array of length 0 is used to flag the "close" or end of stream. 24 | * So, we need to make sure we do not pass a zero-length array to the queue 25 | * and ensure it is only sent if close() is explicitly called 26 | */ 27 | 28 | private static final int BUFFER_SIZE = 64; 29 | private final ArrayBlockingQueue queue = new ArrayBlockingQueue<>(BUFFER_SIZE); 30 | public AsyncOutputStreamQueue(final OutputStream out) { 31 | new Thread(new QueueReader(out)).start(); 32 | } 33 | 34 | @Override 35 | public void write(byte[] b) throws IOException { 36 | if (b.length == 0) return; 37 | 38 | try { 39 | queue.put(b); 40 | } catch (InterruptedException ignored) { 41 | throw new IOException("was interrupted"); 42 | } 43 | } 44 | 45 | @Override 46 | public void write(byte[] b, int offset, int length) throws IOException { 47 | if (length == 0) return; 48 | 49 | byte[] dest = new byte[length]; 50 | System.arraycopy(b, offset, dest, 0, length); 51 | write(dest); 52 | } 53 | 54 | @Override 55 | public void write(int b) throws IOException { 56 | write(new byte[] { (byte) (b & 0xff) }); 57 | } 58 | 59 | @Override 60 | public void close() throws IOException { 61 | try { 62 | queue.put(new byte[0]); 63 | } catch (InterruptedException ignored) { 64 | throw new IOException("was interrupted"); 65 | } 66 | } 67 | 68 | @Override 69 | public void flush() { 70 | // TODO would be nice to handle this 71 | } 72 | 73 | private class QueueReader implements Runnable { 74 | private final OutputStream out; 75 | public QueueReader(final OutputStream out) { 76 | this.out = out; 77 | } 78 | 79 | @Override 80 | public void run() { 81 | try { 82 | while (true) { 83 | ArrayList buffers = new ArrayList<>(BUFFER_SIZE * 2); 84 | buffers.add(queue.take()); 85 | queue.drainTo(buffers); 86 | for(byte[] buffer : buffers) { 87 | if (buffer.length == 0) { 88 | return; 89 | } 90 | out.write(buffer); 91 | } 92 | buffers.clear(); 93 | } 94 | } catch 
(InterruptedException ignored) { 95 | } catch (IOException e) { 96 | e.printStackTrace(); 97 | } finally { 98 | try { 99 | out.flush(); 100 | } catch (IOException e) { 101 | e.printStackTrace(); 102 | } 103 | close(out); 104 | } 105 | } 106 | 107 | private void close(Closeable closeable) { 108 | if (closeable != null) { 109 | try { 110 | closeable.close(); 111 | } catch (IOException ignored) { 112 | } 113 | } 114 | } 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /src/main/java/com/bluedevel/concurrent/BufferedOutputStream.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import java.io.Closeable; 4 | import java.io.IOException; 5 | import java.io.OutputStream; 6 | import java.util.concurrent.atomic.AtomicReferenceArray; 7 | 8 | /** 9 | * A BufferedOutputStream drop-in replacement that uses atomic CAS instead of synchronized 10 | * blocks to get data through. If you have many threads attempting to write to a fast 11 | * OutputStream, then this stream will operate a lot faster than the standard. 12 | */ 13 | public class BufferedOutputStream extends OutputStream { 14 | 15 | /** 16 | * Implementation notes: 17 | * 18 | * Values come in on a write() call. 19 | * 20 | * State is tracked with a State object, which tracks 21 | * where we currently. An AtomicReference CAS is used to 22 | * claim space (once a reference owns the buffer slots, it writes 23 | * to the slots). At any point that a thread wants to replace the 24 | * buffer it can CAS out the buffer as well. If so, the thread that 25 | * replaces the buffer must do the write to the downstream. 26 | * 27 | * To ensure that all threads who should have published to the 28 | * blocks are complete, there is an additional CAS on an AtomicInteger 29 | * to specify the total write length. 
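     * Worked example for one stripe: if claimed == 500 and a writer asks for 100 bytes,
     * it CASes claimed to 600, copies into buffer[500..600), then CASes published up
     * by 100. flush() spins until published == claimed before swapping in a fresh
     * State, so no in-flight copy is lost when the old buffer is written downstream.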
30 | * 31 | * TODO hard coded buffer size and stripe details 32 | * TODO on write() call with lengths larger than buffer_size is not handled 33 | */ 34 | 35 | private static final int BUFFER_SIZE = 24576; 36 | private static final int STRIPE_COUNT = 32; 37 | private static final int STRIPE_MASK = (STRIPE_COUNT - 1); 38 | 39 | private static class State { 40 | public final byte[] buffer; 41 | public final int claimed; 42 | public final int published; 43 | 44 | public State() { 45 | this.buffer = new byte[BUFFER_SIZE]; 46 | this.claimed = 0; 47 | this.published = 0; 48 | } 49 | 50 | public State(final int claimed) { 51 | this.buffer = new byte[BUFFER_SIZE]; 52 | this.claimed = claimed; 53 | this.published = 0; 54 | } 55 | 56 | public State(final byte[] buffer, final int claimed, final int published) { 57 | this.buffer = buffer; 58 | this.claimed = claimed; 59 | this.published = published; 60 | } 61 | 62 | public boolean canClaim(int claim) { 63 | return (buffer.length > (this.claimed + claim)); 64 | } 65 | 66 | public State claim(int claim) { 67 | return new State(this.buffer, this.claimed + claim, this.published); 68 | } 69 | 70 | public State publish(int published) { 71 | return new State(this.buffer, this.claimed, this.published + published); 72 | } 73 | } 74 | 75 | private final AtomicReferenceArray stateRefs; 76 | private volatile boolean isClosed; 77 | private final OutputStream out; 78 | public BufferedOutputStream(final OutputStream out) { 79 | this.out = out; 80 | this.stateRefs = new AtomicReferenceArray<>(STRIPE_COUNT); 81 | for(int i = 0; i < STRIPE_COUNT; i++) { 82 | stateRefs.set(i, new State()); 83 | } 84 | this.isClosed = false; 85 | } 86 | 87 | private int getStripe() { 88 | return (int) (Thread.currentThread().getId() & STRIPE_MASK); 89 | } 90 | 91 | @Override 92 | public void write(byte[] b, int offset, int length) throws IOException { 93 | if (isClosed) throw new IOException("stream is closed"); 94 | 95 | final int stripe = getStripe(); 96 | 97 | State ownedState = null; 98 | while(true) { 99 | State current = stateRefs.get(stripe); 100 | 101 | // if state can accept more bytes, then try to CAS in a claim 102 | if (current.canClaim(length)) { 103 | State newState = current.claim(length); 104 | boolean didClaim = stateRefs.compareAndSet(stripe, current, newState); 105 | if (didClaim) { 106 | ownedState = newState; 107 | break; 108 | } 109 | // if state cannot accept more bytes, then flush to CAS in a new array 110 | } else { 111 | flush(stripe); 112 | } 113 | } 114 | 115 | // write bytes to buffer 116 | System.arraycopy(b, offset, ownedState.buffer, ownedState.claimed - length, length); 117 | 118 | // CAS update that we have published bytes 119 | while(true) { 120 | State current = stateRefs.get(stripe); 121 | State newState = current.publish(length); 122 | boolean didUpdate = stateRefs.compareAndSet(stripe, current, newState); 123 | if (didUpdate) break; 124 | } 125 | } 126 | 127 | @Override 128 | public void write(byte[] b) throws IOException { 129 | write(b, 0, b.length); 130 | } 131 | 132 | @Override 133 | public void write(int b) throws IOException { 134 | write(new byte[] { (byte) (b & 0xff) }); 135 | } 136 | 137 | @Override 138 | public void close() throws IOException { 139 | if (isClosed) throw new IOException("stream is closed"); 140 | isClosed = true; 141 | 142 | int stripe = getStripe(); 143 | 144 | for(int i = 0; i < STRIPE_COUNT; i++) { 145 | flush(i); 146 | } 147 | 148 | out.close(); 149 | } 150 | 151 | @Override 152 | public void flush() throws IOException { 
153 | if (isClosed) throw new IOException("stream is closed"); 154 | 155 | for(int i = 0; i < STRIPE_COUNT; i++) { 156 | flush(i); 157 | } 158 | } 159 | 160 | 161 | private void flush(int stripe) throws IOException { 162 | byte[] oldbuff = null; 163 | while(true) { 164 | State current = stateRefs.get(stripe); 165 | 166 | if (oldbuff == null) { 167 | // first time through 168 | oldbuff = current.buffer; 169 | } 170 | 171 | if (current.claimed == 0) { 172 | // nothing to flush 173 | break; 174 | } 175 | 176 | if (oldbuff != current.buffer) { 177 | // someone else managed to change the buffer already 178 | break; 179 | } 180 | 181 | // spin to wait for writes to catch up 182 | if (current.published != current.claimed) { 183 | continue; 184 | } 185 | 186 | State newState = new State(); 187 | 188 | boolean didUpdate = stateRefs.compareAndSet(stripe, current, newState); 189 | 190 | // if we managed to replace the buffer, do the actual flushing 191 | // to downstream 192 | if (didUpdate) { 193 | out.write(current.buffer, 0, current.published); 194 | out.flush(); 195 | return; 196 | } 197 | } 198 | } 199 | } 200 | -------------------------------------------------------------------------------- /src/main/java/com/bluedevel/concurrent/BufferedOutputStream2.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import java.io.Closeable; 4 | import java.io.IOException; 5 | import java.io.ByteArrayOutputStream; 6 | import java.io.OutputStream; 7 | 8 | import org.jctools.queues.MpscArrayQueue; 9 | 10 | /** 11 | * A BufferedOutputStream drop-in replacement that uses an MPSC queue. 12 | * 13 | * Probably prefer the other CAS based stream as it is faster. 14 | */ 15 | public class BufferedOutputStream2 extends OutputStream { 16 | 17 | /** 18 | * Implementation notes: 19 | * 20 | * Values come in on a write() call. 21 | * 22 | * Buffers are not actually copied. A 'chunk' is wrapped 23 | * around a byte array, and that chunk is then placed in 24 | * a queue. A thread is created to read from the MPSC 25 | * queue and push to the destination stream. 
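     *
     * Note: a Chunk wraps the caller's array without copying it, so the caller must
     * not reuse or mutate that array until the consumer thread has drained the chunk.
     * (The single-byte write(int) is safe because it allocates a fresh array.)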
26 | */ 27 | 28 | private class Chunk { 29 | public byte[] bytes; 30 | public int offset; 31 | public int length; 32 | 33 | public Chunk(byte[] bytes, int offset, int length) { 34 | this.bytes = bytes; 35 | this.offset = offset; 36 | this.length = length; 37 | } 38 | } 39 | 40 | private class Consumer implements Runnable { 41 | final ByteArrayOutputStream buffer = new ByteArrayOutputStream(24576); 42 | 43 | @Override 44 | public void run() { 45 | try { 46 | while(true) { 47 | Chunk chunk = queue.poll(); 48 | 49 | if (chunk == null) { 50 | Thread.sleep(1); 51 | continue; 52 | } 53 | 54 | if (chunk.offset == -1) { 55 | buffer.writeTo(out); 56 | out.flush(); 57 | } 58 | 59 | if (chunk.offset == -2) { 60 | buffer.writeTo(out); 61 | out.flush(); 62 | out.close(); 63 | break; 64 | } 65 | 66 | buffer.write(chunk.bytes, chunk.offset, chunk.length); 67 | 68 | if (buffer.size() >= 24576) { 69 | buffer.writeTo(out); 70 | buffer.reset(); 71 | } 72 | } 73 | hasFinished = true; 74 | } catch (IOException | InterruptedException e) { 75 | throw new RuntimeException(e); 76 | } 77 | } 78 | } 79 | 80 | private volatile boolean isClosed; 81 | private volatile boolean hasFinished; 82 | private final OutputStream out; 83 | private final MpscArrayQueue queue; 84 | private final Chunk POISON_PILL = new Chunk(null, -2, -2); 85 | private final Chunk FLUSH = new Chunk(null, -1, -1); 86 | public BufferedOutputStream2(final OutputStream out) { 87 | this.isClosed = false; 88 | this.out = out; 89 | this.queue = new MpscArrayQueue<>(8192); 90 | new Thread(new Consumer()).start(); 91 | } 92 | 93 | @Override 94 | public void write(byte[] b, int offset, int length) throws IOException { 95 | if (isClosed) throw new IOException("stream is closed"); 96 | 97 | Chunk chunk = new Chunk(b, offset, length); 98 | 99 | while(!queue.offer(chunk)) { 100 | // busy spin 101 | } 102 | } 103 | 104 | @Override 105 | public void write(byte[] b) throws IOException { 106 | if (isClosed) throw new IOException("stream is closed"); 107 | write(b, 0, b.length); 108 | } 109 | 110 | @Override 111 | public void write(int b) throws IOException { 112 | if (isClosed) throw new IOException("stream is closed"); 113 | write(new byte[] { (byte) (b & 0xff) }); 114 | } 115 | 116 | @Override 117 | public void close() throws IOException { 118 | if (isClosed) throw new IOException("stream is closed"); 119 | isClosed = true; 120 | 121 | while(!queue.offer(POISON_PILL)) { 122 | Thread.yield(); 123 | } 124 | 125 | while(!hasFinished) { 126 | Thread.yield(); 127 | } 128 | } 129 | 130 | @Override 131 | public void flush() throws IOException { 132 | if (isClosed) throw new IOException("stream is closed"); 133 | 134 | while(!queue.offer(FLUSH)) { 135 | Thread.yield(); 136 | } 137 | } 138 | 139 | } 140 | -------------------------------------------------------------------------------- /src/main/java/com/bluedevel/concurrent/CustomBlockingMpmcQueue.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import java.util.concurrent.BlockingQueue; 4 | import java.util.concurrent.TimeUnit; 5 | import java.util.Collection; 6 | 7 | import org.jctools.queues.MpmcArrayQueue; 8 | 9 | /* 10 | * A BlockingQueue implementation built upon Nitsan W's JCTools queues. 11 | * 12 | * Use it in a contended executor service, not much else is supported. 
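 *
 * A minimal wiring sketch (capacity and pool sizes are illustrative):
 *
 *   BlockingQueue<Runnable> work = new CustomBlockingMpmcQueue<>(1024);
 *   ExecutorService pool = new ThreadPoolExecutor(
 *           8, 8, 0L, TimeUnit.MILLISECONDS, work);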
13 | */ 14 | public class CustomBlockingMpmcQueue extends MpmcArrayQueue implements BlockingQueue { 15 | 16 | public CustomBlockingMpmcQueue(int capacity) { 17 | super(capacity); 18 | } 19 | 20 | public void put(E e) throws InterruptedException { 21 | while(!offer(e)) { 22 | Thread.sleep(10); 23 | } 24 | } 25 | 26 | public E take() throws InterruptedException { 27 | while(true) { 28 | E e = poll(); 29 | 30 | if (e != null) return e; 31 | 32 | Thread.sleep(10); 33 | } 34 | } 35 | 36 | public int drainTo(Collection c) { 37 | throw new UnsupportedOperationException("not implemented"); 38 | } 39 | 40 | public int drainTo(Collection c, int maxElements) { 41 | throw new UnsupportedOperationException("not implemented"); 42 | } 43 | 44 | public boolean offer(E e, long timeout, TimeUnit unit) { 45 | throw new UnsupportedOperationException("not implemented"); 46 | } 47 | 48 | public E poll(long timeout, TimeUnit unit) { 49 | throw new UnsupportedOperationException("not implemented"); 50 | } 51 | 52 | public int remainingCapacity() { 53 | throw new UnsupportedOperationException("not implemented"); 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /src/main/java/com/bluedevel/concurrent/ParallelGZIPOutputStream.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import java.io.ByteArrayOutputStream; 4 | import java.io.IOException; 5 | import java.io.OutputStream; 6 | import java.nio.ByteBuffer; 7 | import java.nio.ByteOrder; 8 | import java.util.ArrayList; 9 | import java.util.concurrent.ConcurrentLinkedQueue; 10 | import java.util.concurrent.Callable; 11 | import java.util.concurrent.Future; 12 | import java.util.concurrent.TimeUnit; 13 | import java.util.concurrent.ExecutionException; 14 | import java.util.concurrent.RejectedExecutionHandler; 15 | import java.util.concurrent.ExecutorService; 16 | import java.util.zip.CRC32; 17 | import java.util.zip.Deflater; 18 | import java.util.zip.DeflaterOutputStream; 19 | 20 | /** 21 | * A parallel implementation of a gzip output stream. 22 | * 23 | * A parallel implementation of gzip for the JVM. A drop-in replacement for the existing 24 | * GZIPOutputStream. Achieve a significant improvement in your gzip output performance. 25 | * 26 | * Ground up fresh write, which was inspired by Shevek's work 27 | * https://github.com/shevek/parallelgzip 28 | * 29 | * Works in chunks and gzips each them before passing them to the next 30 | * OutputStream. 31 | * 32 | * For efficiency, you should pass larger chunks to the stream to ensure 33 | * the stream has a chance to compress effectively. No buffering is 34 | * performed inside this class. 35 | * 36 | * Note this uses Thread.sleep(1) as a "backoff" mechanism so it's intended 37 | * to be used quickly and close() as soon as possible. 38 | */ 39 | public class ParallelGZIPOutputStream extends OutputStream { 40 | 41 | /** 42 | * Implementation notes 43 | * 44 | * Caller (upstream) of this class should call write() with appropriately sized chunks. 45 | * Compression tasks are submitted to an executor and placed in a queue for processing. 46 | * There's a chance that some compression tasks will be faster than others, so we use 47 | * a FIFO queue to track the tasks and ensure they are handled by the writer in the 48 | * correct order. 49 | * 50 | * CompressionTask task simply compresses a single chunk and passes the output to downstream 51 | * queue. 
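     *
     * Note: write() clones the caller's array before submitting the CompressionTask,
     * so the caller may reuse its buffer as soon as write() returns.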
52 | * 53 | * Writer thread pulls from queue, waiting if needed to ensure chunks are in order, 54 | * and then writes the actual data to the output stream. 55 | * 56 | * Flush and close are handled by placing a special buffer into the queue. Caller thread 57 | * must ensure a buffer is finalised and sent through to ensure downstream is flushed 58 | * and closed. 59 | */ 60 | 61 | private final ConcurrentLinkedQueue> writerQueue; 62 | private final Writer writer; 63 | private final ExecutorService threadPool; 64 | private volatile boolean isClosed = false; 65 | 66 | public ParallelGZIPOutputStream(final OutputStream out, final ExecutorService tpe) throws IOException { 67 | this.writerQueue = new ConcurrentLinkedQueue>(); 68 | this.threadPool = tpe; 69 | this.writer = new Writer(writerQueue, out); 70 | threadPool.submit(writer); 71 | } 72 | 73 | @Override 74 | public void close() throws IOException { 75 | if (isClosed) throw new IOException("stream already closed"); 76 | isClosed = true; 77 | writerQueue.add(threadPool.submit(() -> OutputBuffer.close())); 78 | } 79 | 80 | @Override 81 | public void flush() throws IOException { 82 | if (isClosed) throw new IOException("stream already closed"); 83 | writerQueue.add(threadPool.submit(() -> OutputBuffer.flush())); 84 | } 85 | 86 | @Override 87 | public void write(byte[] b) throws IOException { 88 | write(b, 0, b.length); 89 | } 90 | 91 | @Override 92 | public void write(byte[] b, int off, int len) throws IOException { 93 | if (isClosed) throw new IOException("stream already closed"); 94 | if (len == 0) return; 95 | 96 | byte[] local = b.clone(); 97 | writerQueue.add(threadPool.submit(new CompressionTask(new InputBuffer(local, off, len)))); 98 | } 99 | 100 | @Override 101 | public void write(int b) throws IOException { 102 | write(new byte[] { (byte) (b & 0xff) }, 0, 1); 103 | } 104 | 105 | /** 106 | * Wraps up input data before compression 107 | */ 108 | private static class InputBuffer { 109 | private final byte[] data; 110 | private final int offset; 111 | private final int length; 112 | public InputBuffer(byte[] data, int offset, int length) { 113 | this.data = data; 114 | this.offset = offset; 115 | this.length = length; 116 | } 117 | } 118 | 119 | /** 120 | * Wraps up a data buffer after compression 121 | */ 122 | private static class OutputBuffer { 123 | private final byte[] data; 124 | private final int offset; 125 | private final int length; 126 | private final boolean flush; 127 | private final boolean close; 128 | private final InputBuffer parent; 129 | public OutputBuffer(byte[] data, int offset, int length, InputBuffer parent, boolean flush, boolean close) { 130 | this.data = data; 131 | this.offset = offset; 132 | this.length = length; 133 | this.parent = parent; 134 | this.flush = flush; 135 | this.close = close; 136 | } 137 | 138 | public OutputBuffer(byte[] data, int offset, int length, InputBuffer parent) { 139 | this.data = data; 140 | this.offset = offset; 141 | this.length = length; 142 | this.parent = parent; 143 | this.flush = false; 144 | this.close = false; 145 | } 146 | 147 | public static OutputBuffer flush() { 148 | return new OutputBuffer(null, 0, 0, null, true, false); 149 | } 150 | 151 | public static OutputBuffer close() { 152 | return new OutputBuffer(null, 0, 0, null, false, true); 153 | } 154 | 155 | } 156 | 157 | /** 158 | * Takes a buffer, compresses it, stores it in another buffer, and returns the result 159 | */ 160 | private static class CompressionTask implements Callable { 161 | 162 | private static class 
CompressionTaskState { 163 | private final ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream(1024); 164 | private final Deflater deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true); 165 | private final DeflaterOutputStream deflaterStream = new DeflaterOutputStream(byteBuffer, deflater, 1024, true); 166 | } 167 | 168 | private static final ThreadLocal compressorState = new ThreadLocal() { 169 | protected CompressionTaskState initialValue() { 170 | return new CompressionTaskState(); 171 | } 172 | }; 173 | 174 | private final InputBuffer buffer; 175 | public CompressionTask(InputBuffer buffer) { 176 | this.buffer = buffer; 177 | } 178 | 179 | public OutputBuffer call() throws IOException { 180 | CompressionTaskState state = compressorState.get(); 181 | 182 | state.deflater.reset(); 183 | state.byteBuffer.reset(); 184 | 185 | state.deflaterStream.write(buffer.data, buffer.offset, buffer.length); 186 | state.deflaterStream.flush(); 187 | byte[] bytes = state.byteBuffer.toByteArray(); 188 | 189 | return new OutputBuffer(bytes, 0, bytes.length, buffer); 190 | } 191 | } 192 | 193 | /** 194 | * Processes all buffers coming in from the queue and writes them to the output destination. 195 | */ 196 | private class Writer implements Runnable { 197 | private static final int GZIP_MAGIC = 0x8b1f; 198 | 199 | private final OutputStream out; 200 | private final ConcurrentLinkedQueue> queue; 201 | private long nextToWrite = 0; 202 | private boolean allDone = false; 203 | private long totalBytes = 0; 204 | private final CRC32 crc32 = new CRC32(); 205 | 206 | public Writer(final ConcurrentLinkedQueue> queue, final OutputStream out) throws IOException { 207 | this.queue = queue; 208 | this.out = out; 209 | writeGzipHeader(); 210 | } 211 | 212 | private void writeGzipHeader() throws IOException { 213 | out.write(new byte[] { 214 | (byte) GZIP_MAGIC, 215 | (byte) (GZIP_MAGIC >> 8), 216 | Deflater.DEFLATED, 217 | 0, 218 | 0, 0, 0, 0, 219 | 0, 3 }); 220 | } 221 | 222 | public void run() { 223 | try { 224 | while (!allDone) { 225 | Future next = queue.poll(); 226 | if (next == null) { 227 | Thread.sleep(1); 228 | } else { 229 | handleBuffer(next.get()); 230 | } 231 | } 232 | } catch (ExecutionException | InterruptedException | IOException e) { 233 | e.printStackTrace(); 234 | } 235 | } 236 | 237 | private void handleBuffer(OutputBuffer b) throws IOException { 238 | if (b.data != null) { 239 | out.write(b.data, b.offset, b.length); 240 | totalBytes += b.parent.length; 241 | crc32.update(b.parent.data, b.parent.offset, b.parent.length); 242 | } else if (b.flush) { 243 | out.flush(); 244 | } else if (b.close) { 245 | writeGzipTrailer(); 246 | out.flush(); 247 | out.close(); 248 | allDone = true; 249 | } 250 | } 251 | 252 | private void writeGzipTrailer() throws IOException { 253 | new DeflaterOutputStream(out, new Deflater(Deflater.DEFAULT_COMPRESSION, true), 512, true).finish(); 254 | 255 | ByteBuffer buf = ByteBuffer.allocate(8); 256 | buf.order(ByteOrder.LITTLE_ENDIAN); 257 | buf.putInt((int) crc32.getValue()); 258 | buf.putInt((int) (totalBytes % 4294967296L)); 259 | 260 | out.write(buf.array()); 261 | } 262 | } 263 | } 264 | 265 | -------------------------------------------------------------------------------- /src/test/java/com/bluedevel/concurrent/AsyncOutputStreamQueueTest.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | 6 | import 
java.io.ByteArrayOutputStream; 7 | import java.io.IOException; 8 | 9 | public class AsyncOutputStreamQueueTest { 10 | 11 | class DummyOutputStream extends ByteArrayOutputStream { 12 | public volatile boolean wasClosed = false; 13 | @Override public void close() throws IOException { super.close(); wasClosed = true; } 14 | 15 | public volatile boolean wasFlushed = false; 16 | @Override public void flush() throws IOException { super.flush(); wasFlushed = true; } 17 | } 18 | 19 | 20 | @Test 21 | public void testWriteMultipleElements() throws Exception { 22 | DummyOutputStream dummy = new DummyOutputStream(); 23 | AsyncOutputStreamQueue out = new AsyncOutputStreamQueue(dummy); 24 | 25 | out.write(new byte[] { 'a' }); 26 | out.write(new byte[] { 'b' }); 27 | out.write(new byte[] { 'c' }); 28 | out.write(new byte[] { 'd' }); 29 | out.write(new byte[] { 'e' }); 30 | out.write(new byte[] { 'f' }); 31 | out.write(new byte[] { 'g' }); 32 | out.close(); 33 | 34 | Thread.sleep(10); 35 | 36 | Assert.assertEquals(7, dummy.toByteArray().length); 37 | Assert.assertEquals('a', dummy.toByteArray()[0]); 38 | Assert.assertEquals('b', dummy.toByteArray()[1]); 39 | Assert.assertEquals('c', dummy.toByteArray()[2]); 40 | Assert.assertEquals('d', dummy.toByteArray()[3]); 41 | Assert.assertEquals('e', dummy.toByteArray()[4]); 42 | Assert.assertEquals('f', dummy.toByteArray()[5]); 43 | Assert.assertEquals('g', dummy.toByteArray()[6]); 44 | Assert.assertTrue(dummy.wasFlushed); 45 | Assert.assertTrue(dummy.wasClosed); 46 | 47 | } 48 | 49 | @Test 50 | public void testWriteOneElement() throws Exception { 51 | DummyOutputStream dummy = new DummyOutputStream(); 52 | AsyncOutputStreamQueue out = new AsyncOutputStreamQueue(dummy); 53 | 54 | out.write(new byte[] { 'a' }); 55 | Thread.sleep(10); 56 | 57 | Assert.assertEquals(1, dummy.toByteArray().length); 58 | Assert.assertEquals('a', dummy.toByteArray()[0]); 59 | 60 | out.close(); 61 | } 62 | 63 | @Test 64 | public void testWriteZeroElementsThenClose() throws Exception { 65 | DummyOutputStream dummy = new DummyOutputStream(); 66 | AsyncOutputStreamQueue out = new AsyncOutputStreamQueue(dummy); 67 | 68 | out.write(new byte[0]); 69 | out.write(new byte[0], 0, 0); 70 | Thread.sleep(10); 71 | 72 | Assert.assertFalse(dummy.wasClosed); 73 | 74 | out.close(); 75 | } 76 | 77 | @Test 78 | public void testWriteZeroLengthDoesNotClose() throws Exception { 79 | DummyOutputStream dummy = new DummyOutputStream(); 80 | AsyncOutputStreamQueue out = new AsyncOutputStreamQueue(dummy); 81 | 82 | out.write(new byte[0]); 83 | out.write(new byte[0], 0, 0); 84 | Thread.sleep(10); 85 | 86 | Assert.assertFalse(dummy.wasClosed); 87 | 88 | out.close(); 89 | } 90 | 91 | @Test 92 | public void testCloseTriggersFlush() throws Exception { 93 | DummyOutputStream dummy = new DummyOutputStream(); 94 | AsyncOutputStreamQueue out = new AsyncOutputStreamQueue(dummy); 95 | 96 | out.close(); 97 | Thread.sleep(10); 98 | 99 | Assert.assertTrue(dummy.wasFlushed); 100 | Assert.assertEquals(0, dummy.toByteArray().length); 101 | } 102 | 103 | @Test 104 | public void testCloseTriggersClose() throws Exception { 105 | DummyOutputStream dummy = new DummyOutputStream(); 106 | AsyncOutputStreamQueue out = new AsyncOutputStreamQueue(dummy); 107 | 108 | out.close(); 109 | Thread.sleep(10); 110 | 111 | Assert.assertTrue(dummy.wasClosed); 112 | Assert.assertEquals(0, dummy.toByteArray().length); 113 | } 114 | } 115 | -------------------------------------------------------------------------------- 
/src/test/java/com/bluedevel/concurrent/BufferedOutputStream2Test.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | 6 | import java.io.ByteArrayOutputStream; 7 | import java.io.IOException; 8 | 9 | public class BufferedOutputStream2Test { 10 | 11 | class DummyOutputStream extends ByteArrayOutputStream { 12 | public volatile boolean wasClosed = false; 13 | @Override public void close() throws IOException { super.close(); wasClosed = true; } 14 | 15 | public volatile boolean wasFlushed = false; 16 | @Override public void flush() throws IOException { super.flush(); wasFlushed = true; } 17 | } 18 | 19 | @Test 20 | public void testOneSmallItemDoesNotComeThrough() throws Exception { 21 | DummyOutputStream dummy = new DummyOutputStream(); 22 | BufferedOutputStream2 out = new BufferedOutputStream2(dummy); 23 | 24 | out.write('a'); 25 | Thread.sleep(10); 26 | 27 | Assert.assertEquals(0, dummy.toByteArray().length); 28 | Assert.assertFalse(dummy.wasFlushed); 29 | Assert.assertFalse(dummy.wasClosed); 30 | } 31 | 32 | @Test 33 | public void testManySmallItemsComeAsChunk() throws Exception { 34 | DummyOutputStream dummy = new DummyOutputStream(); 35 | BufferedOutputStream2 out = new BufferedOutputStream2(dummy); 36 | 37 | for(int i = 0; i < 32000; i++) 38 | out.write('a'); 39 | Thread.sleep(10); 40 | 41 | // should have had one flush exactly, of one less than buffer size 42 | byte[] flushed = dummy.toByteArray(); 43 | Assert.assertEquals(24576, flushed.length); 44 | for(int i = 0; i < flushed.length; i++) 45 | Assert.assertEquals('a', flushed[i]); 46 | 47 | Assert.assertFalse(dummy.wasClosed); 48 | } 49 | 50 | @Test 51 | public void testCloseWorks() throws Exception { 52 | DummyOutputStream dummy = new DummyOutputStream(); 53 | BufferedOutputStream2 out = new BufferedOutputStream2(dummy); 54 | 55 | for(int i = 0; i < 32000; i += 10) { 56 | out.write(new byte[] { 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j' }); 57 | } 58 | 59 | out.close(); 60 | Thread.sleep(10); 61 | 62 | // should have had two flushes, and be at full length 63 | byte[] flushed = dummy.toByteArray(); 64 | Assert.assertEquals(32000, flushed.length); 65 | Assert.assertTrue(dummy.wasFlushed); 66 | for(int i = 0; i < flushed.length; i += 10) { 67 | Assert.assertEquals('a', flushed[i + 0]); 68 | Assert.assertEquals('b', flushed[i + 1]); 69 | Assert.assertEquals('c', flushed[i + 2]); 70 | Assert.assertEquals('d', flushed[i + 3]); 71 | Assert.assertEquals('e', flushed[i + 4]); 72 | Assert.assertEquals('f', flushed[i + 5]); 73 | Assert.assertEquals('g', flushed[i + 6]); 74 | Assert.assertEquals('h', flushed[i + 7]); 75 | Assert.assertEquals('i', flushed[i + 8]); 76 | Assert.assertEquals('j', flushed[i + 9]); 77 | } 78 | 79 | Assert.assertTrue(dummy.wasClosed); 80 | } 81 | 82 | @Test 83 | public void testFlushWorks() throws Exception { 84 | DummyOutputStream dummy = new DummyOutputStream(); 85 | BufferedOutputStream2 out = new BufferedOutputStream2(dummy); 86 | 87 | for(int i = 0; i < 32000; i++) 88 | out.write('a'); 89 | 90 | out.flush(); 91 | Thread.sleep(10); 92 | 93 | // should have had two flushes, and be at full length 94 | byte[] flushed = dummy.toByteArray(); 95 | Assert.assertEquals(32000, flushed.length); 96 | Assert.assertTrue(dummy.wasFlushed); 97 | for(int i = 0; i < flushed.length; i++) 98 | Assert.assertEquals('a', flushed[i]); 99 | 100 | Assert.assertFalse(dummy.wasClosed); 101 | } 102 | 
} 103 | -------------------------------------------------------------------------------- /src/test/java/com/bluedevel/concurrent/BufferedOutputStreamTest.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | 6 | import java.io.ByteArrayOutputStream; 7 | import java.io.IOException; 8 | 9 | public class BufferedOutputStreamTest { 10 | 11 | class DummyOutputStream extends ByteArrayOutputStream { 12 | public volatile boolean wasClosed = false; 13 | @Override public void close() throws IOException { super.close(); wasClosed = true; } 14 | 15 | public volatile boolean wasFlushed = false; 16 | @Override public void flush() throws IOException { super.flush(); wasFlushed = true; } 17 | } 18 | 19 | @Test 20 | public void testOneSmallItemDoesNotComeThrough() throws Exception { 21 | DummyOutputStream dummy = new DummyOutputStream(); 22 | BufferedOutputStream out = new BufferedOutputStream(dummy); 23 | 24 | out.write('a'); 25 | 26 | Assert.assertEquals(0, dummy.toByteArray().length); 27 | Assert.assertFalse(dummy.wasFlushed); 28 | Assert.assertFalse(dummy.wasClosed); 29 | } 30 | 31 | @Test 32 | public void testManySmallItemsComeAsChunk() throws Exception { 33 | DummyOutputStream dummy = new DummyOutputStream(); 34 | BufferedOutputStream out = new BufferedOutputStream(dummy); 35 | 36 | for(int i = 0; i < 32000; i++) 37 | out.write('a'); 38 | 39 | // should have had one flush exactly, of one less than buffer size 40 | byte[] flushed = dummy.toByteArray(); 41 | Assert.assertEquals(24575, flushed.length); 42 | for(int i = 0; i < flushed.length; i++) 43 | Assert.assertEquals('a', flushed[i]); 44 | 45 | Assert.assertFalse(dummy.wasClosed); 46 | } 47 | 48 | @Test 49 | public void testCloseWorks() throws Exception { 50 | DummyOutputStream dummy = new DummyOutputStream(); 51 | BufferedOutputStream out = new BufferedOutputStream(dummy); 52 | 53 | for(int i = 0; i < 32000; i += 10) { 54 | out.write(new byte[] { 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j' }); 55 | } 56 | 57 | out.close(); 58 | 59 | // should have had two flushes, and be at full length 60 | byte[] flushed = dummy.toByteArray(); 61 | Assert.assertEquals(32000, flushed.length); 62 | Assert.assertTrue(dummy.wasFlushed); 63 | for(int i = 0; i < flushed.length; i += 10) { 64 | Assert.assertEquals('a', flushed[i + 0]); 65 | Assert.assertEquals('b', flushed[i + 1]); 66 | Assert.assertEquals('c', flushed[i + 2]); 67 | Assert.assertEquals('d', flushed[i + 3]); 68 | Assert.assertEquals('e', flushed[i + 4]); 69 | Assert.assertEquals('f', flushed[i + 5]); 70 | Assert.assertEquals('g', flushed[i + 6]); 71 | Assert.assertEquals('h', flushed[i + 7]); 72 | Assert.assertEquals('i', flushed[i + 8]); 73 | Assert.assertEquals('j', flushed[i + 9]); 74 | } 75 | 76 | Assert.assertTrue(dummy.wasClosed); 77 | } 78 | 79 | @Test 80 | public void testFlushWorks() throws Exception { 81 | DummyOutputStream dummy = new DummyOutputStream(); 82 | BufferedOutputStream out = new BufferedOutputStream(dummy); 83 | 84 | for(int i = 0; i < 32000; i++) 85 | out.write('a'); 86 | 87 | out.flush(); 88 | 89 | // should have had two flushes, and be at full length 90 | byte[] flushed = dummy.toByteArray(); 91 | Assert.assertEquals(32000, flushed.length); 92 | Assert.assertTrue(dummy.wasFlushed); 93 | for(int i = 0; i < flushed.length; i++) 94 | Assert.assertEquals('a', flushed[i]); 95 | 96 | Assert.assertFalse(dummy.wasClosed); 97 | } 98 | } 99 | 
-------------------------------------------------------------------------------- /src/test/java/com/bluedevel/concurrent/CustomBlockingMpmcQueueTest.java: -------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | 6 | import java.util.concurrent.atomic.AtomicLong; 7 | 8 | public class CustomBlockingMpmcQueueTest { 9 | 10 | @Test 11 | public void testProduceOneFirstThenConsume() throws Exception { 12 | // producer should finish immediately, consumer should get message 13 | CustomBlockingMpmcQueue q = new CustomBlockingMpmcQueue(100); 14 | Object e = new Object(); 15 | q.put(e); 16 | 17 | Assert.assertEquals(e, q.take()); 18 | Assert.assertNull(q.peek()); 19 | } 20 | 21 | @Test 22 | public void testConsumeFirstThenProduceOne() throws Exception { 23 | // consumer should block until such time as the producer has placed a message 24 | CustomBlockingMpmcQueue q = new CustomBlockingMpmcQueue(100); 25 | Object e = new Object(); 26 | 27 | new Thread(() -> { 28 | try { 29 | Thread.sleep(100); 30 | q.put(e); 31 | } catch (InterruptedException ie) { 32 | throw new RuntimeException(ie); 33 | } 34 | }).start(); 35 | 36 | Assert.assertEquals(e, q.take()); 37 | Assert.assertNull(q.peek()); 38 | } 39 | 40 | @Test 41 | public void testConsumeFirstThenProduce100() throws Exception { 42 | // consumer should block until the producer has placed a message, then get the rest 43 | CustomBlockingMpmcQueue q = new CustomBlockingMpmcQueue(200); 44 | 45 | Object[] objs = new Object[100]; 46 | for(int i = 0; i < objs.length; i++) 47 | objs[i] = new Object(); 48 | 49 | new Thread(() -> { 50 | try { 51 | Thread.sleep(100); 52 | for(int i = 0; i < objs.length; i++) 53 | q.put(objs[i]); 54 | } catch (InterruptedException ie) { 55 | throw new RuntimeException(ie); 56 | } 57 | }).start(); 58 | 59 | for(int i = 0; i < objs.length; i++) 60 | Assert.assertEquals(objs[i], q.take()); 61 | 62 | Assert.assertNull(q.peek()); 63 | } 64 | 65 | @Test 66 | public void testProduceTooManyAndConsumeSlowly() throws Exception { 67 | // with a capacity of 8 produce 20 and have them consumed with 0.2sec delay 68 | // producer should block (total delays would be 8-20=12 queued * 0.2s = 2.4sec) 69 | CustomBlockingMpmcQueue q = new CustomBlockingMpmcQueue(8); 70 | 71 | Object[] objs = new Object[20]; 72 | for(int i = 0; i < objs.length; i++) 73 | objs[i] = new Object(); 74 | 75 | final AtomicLong putFinishedMillis = new AtomicLong(0); 76 | 77 | new Thread(() -> { 78 | try { 79 | for(int i = 0; i < objs.length; i++) 80 | q.put(objs[i]); 81 | putFinishedMillis.set(System.currentTimeMillis()); 82 | } catch (InterruptedException ie) { 83 | throw new RuntimeException(ie); 84 | } 85 | }).start(); 86 | 87 | for(int i = 0; i < objs.length; i++) { 88 | Thread.sleep(100); 89 | Assert.assertEquals(objs[i], q.take()); 90 | } 91 | 92 | // the put should end just after 1.2sec mark, since 12 elements have to wait 93 | // the total time to process should be 2 sec 94 | // so the gap should be just under 0.8 sec 95 | 96 | Assert.assertTrue((System.currentTimeMillis() - putFinishedMillis.get()) > 750); 97 | Assert.assertTrue((System.currentTimeMillis() - putFinishedMillis.get()) < 850); 98 | Assert.assertNull(q.peek()); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /src/test/java/com/bluedevel/concurrent/ParallelGZIPOutputStreamTest.java: 
-------------------------------------------------------------------------------- 1 | package com.bluedevel.concurrent; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | 6 | import java.io.ByteArrayInputStream; 7 | import java.io.ByteArrayOutputStream; 8 | import java.io.InputStream; 9 | import java.io.IOException; 10 | import java.util.Random; 11 | import java.util.concurrent.Executors; 12 | import java.util.concurrent.ExecutorService; 13 | import java.util.zip.GZIPInputStream; 14 | 15 | public class ParallelGZIPOutputStreamTest { 16 | 17 | ExecutorService threads = Executors.newCachedThreadPool(); 18 | 19 | class DummyOutputStream extends ByteArrayOutputStream { 20 | public volatile boolean wasClosed = false; 21 | @Override public void close() throws IOException { super.close(); wasClosed = true; } 22 | 23 | public volatile boolean wasFlushed = false; 24 | @Override public void flush() throws IOException { super.flush(); wasFlushed = true; } 25 | } 26 | 27 | @Test 28 | public void testZeroByteStream() throws Exception { 29 | DummyOutputStream dummy = new DummyOutputStream(); 30 | ParallelGZIPOutputStream out = new ParallelGZIPOutputStream(dummy, threads); 31 | 32 | out.flush(); 33 | out.close(); 34 | 35 | Thread.sleep(100); 36 | 37 | GZIPInputStream input = new GZIPInputStream(new ByteArrayInputStream(dummy.toByteArray())); 38 | byte[] result = readInputStream(input); 39 | 40 | Assert.assertEquals(0, result.length); 41 | Assert.assertTrue(dummy.wasClosed); 42 | } 43 | 44 | @Test 45 | public void test1ByteStream() throws Exception { 46 | DummyOutputStream dummy = new DummyOutputStream(); 47 | ParallelGZIPOutputStream out = new ParallelGZIPOutputStream(dummy, threads); 48 | 49 | out.write('a'); 50 | out.flush(); 51 | out.close(); 52 | 53 | Thread.sleep(100); 54 | 55 | byte[] result = readInputStream(new GZIPInputStream(new ByteArrayInputStream(dummy.toByteArray()))); 56 | 57 | Assert.assertEquals(1, result.length); 58 | Assert.assertEquals('a', result[0]); 59 | Assert.assertTrue(dummy.wasClosed); 60 | } 61 | 62 | @Test 63 | public void test1000ByteStream() throws Exception { 64 | DummyOutputStream dummy = new DummyOutputStream(); 65 | ParallelGZIPOutputStream out = new ParallelGZIPOutputStream(dummy, threads); 66 | 67 | Random r = new Random(); 68 | byte[] raw = new byte[1000]; 69 | r.nextBytes(raw); 70 | 71 | out.write(raw); 72 | out.flush(); 73 | out.close(); 74 | 75 | Thread.sleep(100); 76 | 77 | byte[] result = readInputStream(new GZIPInputStream(new ByteArrayInputStream(dummy.toByteArray()))); 78 | 79 | Assert.assertArrayEquals(raw, result); 80 | } 81 | 82 | @Test 83 | public void testMany1000ByteChunks() throws Exception { 84 | DummyOutputStream dummy = new DummyOutputStream(); 85 | ParallelGZIPOutputStream out = new ParallelGZIPOutputStream(dummy, threads); 86 | 87 | Random r = new Random(); 88 | byte[] raw = new byte[1000*1000]; 89 | r.nextBytes(raw); 90 | 91 | for(int i = 0; i < 1000000; i += 1000) { 92 | out.write(raw, i, 1000); 93 | } 94 | 95 | out.flush(); 96 | out.close(); 97 | 98 | // let it catch up 99 | Thread.sleep(1000); 100 | 101 | byte[] result = readInputStream(new GZIPInputStream(new ByteArrayInputStream(dummy.toByteArray()))); 102 | 103 | Assert.assertArrayEquals(raw, result); 104 | } 105 | 106 | private byte[] readInputStream(InputStream input) throws IOException { 107 | byte[] buffer = new byte[1024]; 108 | ByteArrayOutputStream out = new ByteArrayOutputStream(); 109 | 110 | int len; 111 | while ((len = input.read(buffer)) > 0) { 112 | 
out.write(buffer, 0, len); 113 | } 114 | 115 | out.close(); 116 | return out.toByteArray(); 117 | } 118 | 119 | } 120 | --------------------------------------------------------------------------------