Use of java.io.SequenceInputStream in project databus by linkedin.
The class XmlFormatTrailParser, method wrapStreamWithXmlTags.
/**
 * Make the trail files input look like real XML
 */
private InputStream wrapStreamWithXmlTags(InputStream compositeInputStream) {
  String xmlStart = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n" + DTD + "\n<root>";
  String xmlEnd = "</root>";
  _log.info("The xml start tag used is:" + xmlStart);
  List<InputStream> xmlTagsList = Arrays.asList(new InputStream[] {
      new ByteArrayInputStream(xmlStart.getBytes(Charset.forName("ISO-8859-1"))),
      compositeInputStream,
      new ByteArrayInputStream(xmlEnd.getBytes(Charset.forName("ISO-8859-1"))) });
  Enumeration<InputStream> streams = Collections.enumeration(xmlTagsList);
  SequenceInputStream seqStream = new SequenceInputStream(streams);
  return seqStream;
}
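Reading the returned stream yields the XML declaration and DTD, then the trail file bytes, then the closing tag, without ever materializing the whole document in memory. A minimal self-contained sketch of the same wrapping pattern (XmlWrapDemo, wrap, and the fake <tx/> trail content are illustrative stand-ins, not databus code; the DTD is elided):

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;

public class XmlWrapDemo {
  // Prepend an XML prologue and append a closing root tag around an arbitrary stream.
  static InputStream wrap(InputStream body) {
    byte[] start = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n<root>"
        .getBytes(StandardCharsets.ISO_8859_1);
    byte[] end = "</root>".getBytes(StandardCharsets.ISO_8859_1);
    return new SequenceInputStream(Collections.enumeration(Arrays.asList(
        new ByteArrayInputStream(start), body, new ByteArrayInputStream(end))));
  }

  public static void main(String[] args) throws IOException {
    InputStream trail = new ByteArrayInputStream("<tx/>".getBytes(StandardCharsets.ISO_8859_1));
    System.out.println(new String(wrap(trail).readAllBytes(), StandardCharsets.ISO_8859_1));
    // prints the declaration, then <root><tx/></root>
  }
}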
Use of java.io.SequenceInputStream in project databus by linkedin.
The class GoldenGateEventProducer, method wrapStreamWithXmlTags.
/**
 * Takes an input stream and wraps it with XML tags, using the XML encoding
 * and XML version specified in the physical sources config.
 * @param compositeInputStream the input stream to be wrapped with the XML tags
 * @return the wrapped stream
 */
private InputStream wrapStreamWithXmlTags(InputStream compositeInputStream) {
  String xmlVersion = _pConfig.getXmlVersion();
  String xmlEncoding = _pConfig.getXmlEncoding();
  String xmlStart = "<?xml version=\"" + xmlVersion + "\" encoding=\"" + xmlEncoding + "\"?>\n<root>";
  String xmlEnd = "</root>";
  _log.info("The xml start tag used is:" + xmlStart);
  List<InputStream> xmlTagsList = Arrays.asList(new InputStream[] {
      new ByteArrayInputStream(xmlStart.getBytes(Charset.forName(xmlEncoding))),
      compositeInputStream,
      new ByteArrayInputStream(xmlEnd.getBytes(Charset.forName(xmlEncoding))) });
  Enumeration<InputStream> streams = Collections.enumeration(xmlTagsList);
  SequenceInputStream seqStream = new SequenceInputStream(streams);
  return seqStream;
}
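Apart from pulling the XML version and encoding from the physical sources config (and omitting the DTD), this is the same construction as the XmlFormatTrailParser variant above. Note that the same configured encoding is used both inside the XML declaration and in Charset.forName when converting the start and end tags to bytes, so the prologue's actual encoding matches what it declares.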
Use of java.io.SequenceInputStream in project databus by linkedin.
The class ConcurrentAppendableCompositeFileInputStream, method initializeStream.
/**
 * Sets up the sequence stream from the trail files that have been located.
 * @throws IOException
 */
public void initializeStream() throws IOException {
  _streamEnumerator = new InputStreamEnumerator(_staticStream, _trailFileManager, _ggParserStats);
  boolean success = _trailFileLocator.fetchOneTime();
  if (!success) {
    return;
  }
  _seqStream = new SequenceInputStream(_streamEnumerator);
  if (!_staticStream) {
    // Note: TrailFileLocator needs to be started only after seqStream is constructed.
    _trailFileLocator.start();
  }
  _initDone = true;
}
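What makes this work for trail files that are still being appended to is that SequenceInputStream consumes its Enumeration lazily: it asks for the next element only after the current stream hits EOF (closing the exhausted stream first), so the enumerator can locate and open trail files on demand. A minimal sketch of such a lazy enumerator (LazyFileEnumerator and its file list are hypothetical stand-ins for databus's InputStreamEnumerator):

import java.io.*;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;

// Opens one file at a time; SequenceInputStream calls nextElement()
// only after the previous stream is exhausted.
class LazyFileEnumerator implements Enumeration<InputStream> {
  private final Iterator<File> files;

  LazyFileEnumerator(List<File> files) {
    this.files = files.iterator();
  }

  @Override
  public boolean hasMoreElements() {
    return files.hasNext();
  }

  @Override
  public InputStream nextElement() {
    try {
      return new FileInputStream(files.next());
    } catch (FileNotFoundException e) {
      throw new RuntimeException(e);
    }
  }
}

A caller would then read all files as one continuous stream: new SequenceInputStream(new LazyFileEnumerator(trailFiles)).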
Use of java.io.SequenceInputStream in project hbase by apache.
The class HFileInfo, method read.
/**
 * Populate this instance with what we find on the passed in <code>in</code> stream.
 * Can deserialize protobuf or old Writables format.
 * @see #write(DataOutputStream)
 */
void read(final DataInputStream in) throws IOException {
  // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.
  int pblen = ProtobufUtil.lengthOfPBMagic();
  byte[] pbuf = new byte[pblen];
  if (in.markSupported()) {
    in.mark(pblen);
  }
  int read = in.read(pbuf);
  if (read != pblen) {
    throw new IOException("read=" + read + ", wanted=" + pblen);
  }
  if (ProtobufUtil.isPBMagicPrefix(pbuf)) {
    parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));
  } else {
    if (in.markSupported()) {
      in.reset();
      parseWritable(in);
    } else {
      // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS
      ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);
      // Concatenate input streams
      SequenceInputStream sis = new SequenceInputStream(bais, in);
      // TODO: Am I leaking anything here wrapping the passed in stream? We are not calling
      // close on the wrapped streams but they should be let go after we leave this context?
      // I see that we keep a reference to the passed in inputstream but since we no longer
      // have a reference to this after we leave, we should be ok.
      parseWritable(new DataInputStream(sis));
    }
  }
}
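The SequenceInputStream here acts as a lightweight push-back: the magic-prefix bytes already consumed from in are glued back in front of the remaining stream so parseWritable sees the record from its first byte (java.io.PushbackInputStream offers the same effect at the cost of another wrapper and a copy into its internal buffer). A standalone sketch of the trick, with illustrative names and fake data:

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class SniffDemo {
  // Re-attach bytes that were already consumed while sniffing a header.
  static InputStream unread(byte[] consumed, InputStream rest) {
    return new SequenceInputStream(new ByteArrayInputStream(consumed), rest);
  }

  public static void main(String[] args) throws IOException {
    InputStream in = new ByteArrayInputStream("MAGICpayload".getBytes(StandardCharsets.US_ASCII));
    byte[] magic = new byte[5];
    int n = in.read(magic); // consumes up to the first 5 bytes
    InputStream whole = unread(Arrays.copyOf(magic, n), in);
    // whole now yields "MAGICpayload" from the beginning again
    System.out.println(new String(whole.readAllBytes(), StandardCharsets.US_ASCII));
  }
}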
Use of java.io.SequenceInputStream in project beam by apache.
The class GrpcWindmillServerTest, method testStreamingCommit.
@Test
public void testStreamingCommit() throws Exception {
  List<WorkItemCommitRequest> commitRequestList = new ArrayList<>();
  List<CountDownLatch> latches = new ArrayList<>();
  Map<Long, WorkItemCommitRequest> commitRequests = new HashMap<>();
  for (int i = 0; i < 500; ++i) {
    // Build some requests of varying size with a few big ones.
    WorkItemCommitRequest request = makeCommitRequest(i, i * (i < 480 ? 8 : 128));
    commitRequestList.add(request);
    commitRequests.put((long) i, request);
    latches.add(new CountDownLatch(1));
  }
  Collections.shuffle(commitRequestList);
  // This server receives WorkItemCommitRequests and verifies that they are equal to the
  // commit requests built above.
  serviceRegistry.addService(new CloudWindmillServiceV1Alpha1ImplBase() {
    @Override
    public StreamObserver<StreamingCommitWorkRequest> commitWorkStream(
        StreamObserver<StreamingCommitResponse> responseObserver) {
      return new StreamObserver<StreamingCommitWorkRequest>() {
        boolean sawHeader = false;
        InputStream buffer = null;
        long remainingBytes = 0;
        ResponseErrorInjector injector = new ResponseErrorInjector(responseObserver);

        @Override
        public void onNext(StreamingCommitWorkRequest request) {
          maybeInjectError(responseObserver);
          if (!sawHeader) {
            errorCollector.checkThat(
                request.getHeader(),
                Matchers.equalTo(
                    JobHeader.newBuilder()
                        .setJobId("job")
                        .setProjectId("project")
                        .setWorkerId("worker")
                        .build()));
            sawHeader = true;
            LOG.info("Received header");
          } else {
            boolean first = true;
            LOG.info("Received request with {} chunks", request.getCommitChunkCount());
            for (StreamingCommitRequestChunk chunk : request.getCommitChunkList()) {
              assertTrue(chunk.getSerializedWorkItemCommit().size() <= STREAM_CHUNK_SIZE);
              if (first || chunk.hasComputationId()) {
                errorCollector.checkThat(chunk.getComputationId(), Matchers.equalTo("computation"));
              }
              if (remainingBytes != 0) {
                errorCollector.checkThat(buffer, Matchers.notNullValue());
                errorCollector.checkThat(
                    remainingBytes,
                    Matchers.is(
                        chunk.getSerializedWorkItemCommit().size()
                            + chunk.getRemainingBytesForWorkItem()));
                buffer = new SequenceInputStream(buffer, chunk.getSerializedWorkItemCommit().newInput());
              } else {
                errorCollector.checkThat(buffer, Matchers.nullValue());
                buffer = chunk.getSerializedWorkItemCommit().newInput();
              }
              remainingBytes = chunk.getRemainingBytesForWorkItem();
              if (remainingBytes == 0) {
                try {
                  WorkItemCommitRequest received = WorkItemCommitRequest.parseFrom(buffer);
                  errorCollector.checkThat(
                      received, Matchers.equalTo(commitRequests.get(received.getWorkToken())));
                  try {
                    responseObserver.onNext(
                        StreamingCommitResponse.newBuilder().addRequestId(chunk.getRequestId()).build());
                  } catch (IllegalStateException e) {
                    // Stream is closed.
                  }
                } catch (Exception e) {
                  errorCollector.addError(e);
                }
                buffer = null;
              } else {
                errorCollector.checkThat(first, Matchers.is(true));
              }
              first = false;
            }
          }
        }

        @Override
        public void onError(Throwable throwable) {
        }

        @Override
        public void onCompleted() {
          injector.cancel();
          responseObserver.onCompleted();
        }
      };
    }
  });
  // Make the commit requests, waiting for each of them to be verified and acknowledged.
  CommitWorkStream stream = client.commitWorkStream();
  for (int i = 0; i < commitRequestList.size(); ) {
    final CountDownLatch latch = latches.get(i);
    if (stream.commitWorkItem(
        "computation",
        commitRequestList.get(i),
        (CommitStatus status) -> {
          assertEquals(status, CommitStatus.OK);
          latch.countDown();
        })) {
      i++;
    } else {
      stream.flush();
    }
  }
  stream.flush();
  for (CountDownLatch latch : latches) {
    assertTrue(latch.await(1, TimeUnit.MINUTES));
  }
  stream.close();
  assertTrue(stream.awaitTermination(30, TimeUnit.SECONDS));
}
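The line worth noticing is buffer = new SequenceInputStream(buffer, chunk.getSerializedWorkItemCommit().newInput()): each arriving chunk is chained onto the accumulated stream without copying any bytes, and the protobuf parser later reads the chain as one contiguous message. A reduced sketch of that reassembly pattern, independent of Windmill (the chunk contents are made up):

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.List;

public class ReassembleDemo {
  public static void main(String[] args) throws IOException {
    List<byte[]> chunks = List.of(
        "hello, ".getBytes(StandardCharsets.UTF_8),
        "sequence ".getBytes(StandardCharsets.UTF_8),
        "streams".getBytes(StandardCharsets.UTF_8));

    // Chain each chunk onto the accumulated stream; bytes are only
    // touched when the final consumer reads the combined stream.
    InputStream buffer = null;
    for (byte[] chunk : chunks) {
      InputStream next = new ByteArrayInputStream(chunk);
      buffer = (buffer == null) ? next : new SequenceInputStream(buffer, next);
    }

    System.out.println(new String(buffer.readAllBytes(), StandardCharsets.UTF_8));
    // prints: hello, sequence streams
  }
}

One caveat of the chaining: each wrap adds a level of nesting, so every read traverses the whole chain. That is fine for the handful of chunks a single work item is split into here, but a very long chain would be better served by a single Enumeration-backed SequenceInputStream as in the databus examples above.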