Example usage of org.opensearch.common.io.stream.OutputStreamStreamOutput in the OpenSearch project (opensearch-project): class ScriptTests, method testScriptSerialization.
/**
 * Round-trips a randomly created {@code Script} through the stream
 * serialization layer and verifies the deserialized copy equals the original.
 */
public void testScriptSerialization() throws IOException {
    final Script original = createScript();
    try (ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
        original.writeTo(new OutputStreamStreamOutput(buffer));
        try (ByteArrayInputStream source = new ByteArrayInputStream(buffer.toByteArray())) {
            // Deserialize from the captured bytes and compare with the source script.
            assertThat(new Script(new InputStreamStreamInput(source)), equalTo(original));
        }
    }
}
Example usage of org.opensearch.common.io.stream.OutputStreamStreamOutput in the OpenSearch project (opensearch-project): class TransportDecompressorTests, method testIncrementalMultiPageCompression.
/**
 * Compresses 10,000 ints, feeds the compressed bytes to the decompressor in
 * three uneven slices, and verifies that the decompressed pages decode back
 * to the original integer sequence.
 */
public void testIncrementalMultiPageCompression() throws IOException {
    try (BytesStreamOutput output = new BytesStreamOutput()) {
        try (StreamOutput deflateStream = new OutputStreamStreamOutput(
                CompressorFactory.COMPRESSOR.threadLocalOutputStream(Streams.flushOnCloseStream(output)))) {
            for (int value = 0; value < 10000; ++value) {
                deflateStream.writeInt(value);
            }
        }
        final BytesReference compressed = output.bytes();
        // Split the compressed payload at ~30% and ~65% to exercise incremental feeding.
        final int firstCut = (int) (compressed.length() * 0.3);
        final int secondCut = (int) (compressed.length() * 0.65);
        final BytesReference[] chunks = new BytesReference[] {
            compressed.slice(0, firstCut),
            compressed.slice(firstCut, secondCut - firstCut),
            compressed.slice(secondCut, compressed.length() - secondCut) };
        final TransportDecompressor decompressor = new TransportDecompressor(PageCacheRecycler.NON_RECYCLING_INSTANCE);
        for (int i = 0; i < chunks.length; ++i) {
            assertEquals(chunks[i].length(), decompressor.decompress(chunks[i]));
            // End-of-stream must only be reached after the final chunk is consumed.
            assertEquals(i == chunks.length - 1, decompressor.isEOS());
        }
        final ReleasableBytesReference page1 = decompressor.pollDecompressedPage();
        final ReleasableBytesReference page2 = decompressor.pollDecompressedPage();
        final ReleasableBytesReference page3 = decompressor.pollDecompressedPage();
        // Exactly three pages were produced; a fourth poll yields nothing.
        assertNull(decompressor.pollDecompressedPage());
        final BytesReference composite = CompositeBytesReference.of(page1, page2, page3);
        assertEquals(4 * 10000, composite.length());
        final StreamInput decoded = composite.streamInput();
        for (int expected = 0; expected < 10000; ++expected) {
            assertEquals(expected, decoded.readInt());
        }
        Releasables.close(page1, page2, page3);
    }
}
Example usage of org.opensearch.common.io.stream.OutputStreamStreamOutput in the OpenSearch project (opensearch-project): class PublicationTransportHandler, method serializeFullClusterState.
/**
 * Serializes the given cluster state, compressed, into a byte reference
 * suitable for sending to a node running {@code nodeVersion}.
 */
private static BytesReference serializeFullClusterState(ClusterState clusterState, Version nodeVersion) throws IOException {
    final BytesStreamOutput buffer = new BytesStreamOutput();
    try (StreamOutput compressedOut = new OutputStreamStreamOutput(
            CompressorFactory.COMPRESSOR.threadLocalOutputStream(buffer))) {
        compressedOut.setVersion(nodeVersion);
        // Marker boolean: true indicates a full cluster state (this method always writes true).
        compressedOut.writeBoolean(true);
        clusterState.writeTo(compressedOut);
    }
    final BytesReference serializedState = buffer.bytes();
    logger.trace(
        "serialized full cluster state version [{}] for node version [{}] with size [{}]",
        clusterState.version(),
        nodeVersion,
        serializedState.length()
    );
    return serializedState;
}
Example usage of org.opensearch.common.io.stream.OutputStreamStreamOutput in the OpenSearch project (opensearch-project): class ExplainActionIT, method testStreamExplain.
/**
 * Verifies that {@code Explanation} instances survive a round trip through
 * {@code Lucene.writeExplanation} / {@code Lucene.readExplanation}, both for a
 * simple explanation and for one carrying a nested detail explanation.
 */
public void testStreamExplain() throws Exception {
    assertExplanationRoundTrip(Explanation.match(2f, "some explanation"));
    assertExplanationRoundTrip(
        Explanation.match(2.0f, "some explanation", Explanation.match(2.0f, "another explanation"))
    );
}

/**
 * Writes {@code expected} to an in-memory stream, reads it back, and compares
 * the string renderings. Extracted to remove the duplicated write/read blocks.
 */
private void assertExplanationRoundTrip(Explanation expected) throws Exception {
    // write
    ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
    Lucene.writeExplanation(new OutputStreamStreamOutput(outBuffer), expected);
    // read
    ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
    Explanation result = Lucene.readExplanation(new InputStreamStreamInput(inBuffer));
    // assertThat takes the ACTUAL value first and the matcher on the EXPECTED
    // value second; the original had them swapped, which mislabels failure output.
    assertThat(result.toString(), equalTo(expected.toString()));
}
Example usage of org.opensearch.common.io.stream.OutputStreamStreamOutput in the OpenSearch project (opensearch-project): class TranslogHeader, method write.
/**
 * Writes this header with the latest format into the file channel and forces
 * it to disk. On-disk layout (in write order): Lucene codec header, the
 * length-prefixed translog UUID bytes, the primary term, and a checksum of
 * everything written so far.
 */
void write(final FileChannel channel) throws IOException {
// This output is intentionally not closed because closing it will close the FileChannel.
@SuppressWarnings({ "IOResourceOpenedButNotSafelyClosed", "resource" }) final BufferedChecksumStreamOutput out = new BufferedChecksumStreamOutput(new OutputStreamStreamOutput(java.nio.channels.Channels.newOutputStream(channel)));
CodecUtil.writeHeader(new OutputStreamDataOutput(out), TRANSLOG_CODEC, CURRENT_VERSION);
// Write uuid: length prefix followed by the raw UUID bytes.
final BytesRef uuid = new BytesRef(translogUUID);
out.writeInt(uuid.length);
out.writeBytes(uuid.bytes, uuid.offset, uuid.length);
// Write primary term
out.writeLong(primaryTerm);
// Checksum header: covers the codec header, uuid, and primary term written above,
// accumulated by the BufferedChecksumStreamOutput wrapper.
out.writeInt((int) out.getChecksum());
out.flush();
// Force the header to stable storage before the translog file is used.
channel.force(true);
// NOTE(review): assumes headerSizeInBytes was computed from this same layout — confirm at the field's definition.
assert channel.position() == headerSizeInBytes : "Header is not fully written; header size [" + headerSizeInBytes + "], channel position [" + channel.position() + "]";
}
Aggregations