Use of java.io.SequenceInputStream in project eclipse.platform.text by eclipse.
The class FileStoreTextFileBuffer, method commitFileBufferContent:
@Override
protected void commitFileBufferContent(IProgressMonitor monitor, boolean overwrite) throws CoreException {
    if (!isSynchronized() && !overwrite) {
        String message = NLSUtility.format(FileBuffersMessages.FileBuffer_error_outOfSync, getFileStore().toURI());
        throw new CoreException(new Status(IStatus.WARNING, FileBuffersPlugin.PLUGIN_ID, IResourceStatus.OUT_OF_SYNC_LOCAL, message, null));
    }
    String encoding = computeEncoding();
    Charset charset;
    try {
        charset = Charset.forName(encoding);
    } catch (UnsupportedCharsetException ex) {
        String message = NLSUtility.format(FileBuffersMessages.ResourceTextFileBuffer_error_unsupported_encoding_message_arg, encoding);
        IStatus s = new Status(IStatus.ERROR, FileBuffersPlugin.PLUGIN_ID, IStatus.OK, message, ex);
        throw new CoreException(s);
    } catch (IllegalCharsetNameException ex) {
        String message = NLSUtility.format(FileBuffersMessages.ResourceTextFileBuffer_error_illegal_encoding_message_arg, encoding);
        IStatus s = new Status(IStatus.ERROR, FileBuffersPlugin.PLUGIN_ID, IStatus.OK, message, ex);
        throw new CoreException(s);
    }
    CharsetEncoder encoder = charset.newEncoder();
    encoder.onMalformedInput(CodingErrorAction.REPLACE);
    encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    byte[] bytes;
    int bytesLength;
    try {
        ByteBuffer byteBuffer = encoder.encode(CharBuffer.wrap(fDocument.get()));
        bytesLength = byteBuffer.limit();
        if (byteBuffer.hasArray())
            bytes = byteBuffer.array();
        else {
            bytes = new byte[bytesLength];
            byteBuffer.get(bytes);
        }
    } catch (CharacterCodingException ex) {
        Assert.isTrue(ex instanceof UnmappableCharacterException);
        String message = NLSUtility.format(FileBuffersMessages.ResourceTextFileBuffer_error_charset_mapping_failed_message_arg, encoding);
        IStatus s = new Status(IStatus.ERROR, FileBuffersPlugin.PLUGIN_ID, IFileBufferStatusCodes.CHARSET_MAPPING_FAILED, message, null);
        throw new CoreException(s);
    }
    IFileInfo fileInfo = fFileStore.fetchInfo();
    if (fileInfo != null && fileInfo.exists()) {
        if (!overwrite)
            checkSynchronizationState();
        InputStream stream = new ByteArrayInputStream(bytes, 0, bytesLength);
        /*
         * XXX:
         * This is a workaround for a corresponding bug in Java readers and writers,
         * see http://developer.java.sun.com/developer/bugParade/bugs/4508058.html
         */
        if (fHasBOM && CHARSET_UTF_8.equals(encoding))
            stream = new SequenceInputStream(new ByteArrayInputStream(IContentDescription.BOM_UTF_8), stream);
        // Here the file synchronizer should actually be removed and afterwards added again. However,
        // we are already inside an operation, so the delta is sent AFTER we have added the listener.
        setFileContents(stream, monitor);
        // Set the synchronization stamp to know whether the file synchronizer must become active.
        fSynchronizationStamp = fFileStore.fetchInfo().getLastModified();
        if (fAnnotationModel instanceof IPersistableAnnotationModel) {
            IPersistableAnnotationModel persistableModel = (IPersistableAnnotationModel) fAnnotationModel;
            persistableModel.commit(fDocument);
        }
    } else {
        fFileStore.getParent().mkdir(EFS.NONE, null);
        try (OutputStream out = fFileStore.openOutputStream(EFS.NONE, null)) {
            /*
             * XXX:
             * This is a workaround for a corresponding bug in Java readers and writers,
             * see http://developer.java.sun.com/developer/bugParade/bugs/4508058.html
             */
            if (fHasBOM && CHARSET_UTF_8.equals(encoding))
                out.write(IContentDescription.BOM_UTF_8);
            out.write(bytes, 0, bytesLength);
            out.flush();
        } catch (IOException x) {
            IStatus s = new Status(IStatus.ERROR, FileBuffersPlugin.PLUGIN_ID, IStatus.OK, x.getLocalizedMessage(), x);
            throw new CoreException(s);
        }
        // Set the synchronization stamp to know whether the file synchronizer must become active.
        fSynchronizationStamp = fFileStore.fetchInfo().getLastModified();
    }
}
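The SequenceInputStream above exists only to prepend the UTF-8 byte order mark to the already-encoded document bytes before they are written out. Here is a minimal, self-contained sketch of the same pattern (Java 9+; the hard-coded BOM_UTF_8 array stands in for IContentDescription.BOM_UTF_8):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.nio.charset.StandardCharsets;

public class BomPrependDemo {
    // The UTF-8 byte order mark, i.e. the bytes IContentDescription.BOM_UTF_8 holds.
    private static final byte[] BOM_UTF_8 = {(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};

    public static void main(String[] args) throws IOException {
        byte[] body = "hello".getBytes(StandardCharsets.UTF_8);
        // SequenceInputStream drains the first stream, then continues with the second.
        InputStream stream = new SequenceInputStream(
                new ByteArrayInputStream(BOM_UTF_8),
                new ByteArrayInputStream(body));
        byte[] all = stream.readAllBytes();
        System.out.println(all.length); // 8 = 3 BOM bytes + 5 body bytes
    }
}

Chaining two ByteArrayInputStreams avoids copying the encoded bytes into a new, larger array just to prefix three bytes.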
Use of java.io.SequenceInputStream in project eclipse.platform.text by eclipse.
The class ResourceTextFileBuffer, method commitFileBufferContent:
@Override
protected void commitFileBufferContent(IProgressMonitor monitor, boolean overwrite) throws CoreException {
    if (!isSynchronized() && !overwrite) {
        String message = NLSUtility.format(FileBuffersMessages.FileBuffer_error_outOfSync, getFileStore().toURI());
        throw new CoreException(new Status(IStatus.WARNING, FileBuffersPlugin.PLUGIN_ID, IResourceStatus.OUT_OF_SYNC_LOCAL, message, null));
    }
    String encoding = computeEncoding();
    if (fBOM == IContentDescription.BOM_UTF_16LE && CHARSET_UTF_16.equals(encoding))
        encoding = CHARSET_UTF_16LE;
    Charset charset;
    try {
        charset = Charset.forName(encoding);
    } catch (UnsupportedCharsetException ex) {
        String message = NLSUtility.format(FileBuffersMessages.ResourceTextFileBuffer_error_unsupported_encoding_message_arg, encoding);
        IStatus s = new Status(IStatus.ERROR, FileBuffersPlugin.PLUGIN_ID, IStatus.OK, message, ex);
        throw new CoreException(s);
    } catch (IllegalCharsetNameException ex) {
        String message = NLSUtility.format(FileBuffersMessages.ResourceTextFileBuffer_error_illegal_encoding_message_arg, encoding);
        IStatus s = new Status(IStatus.ERROR, FileBuffersPlugin.PLUGIN_ID, IStatus.OK, message, ex);
        throw new CoreException(s);
    }
    CharsetEncoder encoder = charset.newEncoder();
    encoder.onMalformedInput(CodingErrorAction.REPLACE);
    encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    InputStream stream;
    try {
        byte[] bytes;
        ByteBuffer byteBuffer = encoder.encode(CharBuffer.wrap(fDocument.get()));
        if (byteBuffer.hasArray())
            bytes = byteBuffer.array();
        else {
            bytes = new byte[byteBuffer.limit()];
            byteBuffer.get(bytes);
        }
        stream = new ByteArrayInputStream(bytes, 0, byteBuffer.limit());
    } catch (CharacterCodingException ex) {
        Assert.isTrue(ex instanceof UnmappableCharacterException);
        String message = NLSUtility.format(FileBuffersMessages.ResourceTextFileBuffer_error_charset_mapping_failed_message_arg, encoding);
        IStatus s = new Status(IStatus.ERROR, FileBuffersPlugin.PLUGIN_ID, IFileBufferStatusCodes.CHARSET_MAPPING_FAILED, message, ex);
        throw new CoreException(s);
    }
    /*
     * XXX:
     * This is a workaround for a corresponding bug in Java readers and writers,
     * see http://developer.java.sun.com/developer/bugParade/bugs/4508058.html
     */
    if (fBOM == IContentDescription.BOM_UTF_8 && CHARSET_UTF_8.equals(encoding))
        stream = new SequenceInputStream(new ByteArrayInputStream(IContentDescription.BOM_UTF_8), stream);
    if (fBOM == IContentDescription.BOM_UTF_16LE && CHARSET_UTF_16LE.equals(encoding))
        stream = new SequenceInputStream(new ByteArrayInputStream(IContentDescription.BOM_UTF_16LE), stream);
    if (fFile.exists()) {
        // Here the file synchronizer should actually be removed and afterwards added again. However,
        // we are already inside an operation, so the delta is sent AFTER we have added the listener.
        fFile.setContents(stream, overwrite, true, monitor);
        if (fDocument instanceof IDocumentExtension4) {
            fSynchronizationStamp = ((IDocumentExtension4) fDocument).getModificationStamp();
            fFile.revertModificationStamp(fSynchronizationStamp);
        } else
            fSynchronizationStamp = fFile.getModificationStamp();
        if (fAnnotationModel instanceof IPersistableAnnotationModel) {
            IPersistableAnnotationModel persistableModel = (IPersistableAnnotationModel) fAnnotationModel;
            persistableModel.commit(fDocument);
        }
    } else {
        SubMonitor subMonitor = SubMonitor.convert(monitor, FileBuffersMessages.ResourceTextFileBuffer_task_saving, 2);
        ContainerCreator creator = new ContainerCreator(fFile.getWorkspace(), fFile.getParent().getFullPath());
        creator.createContainer(subMonitor.split(1));
        fFile.create(stream, false, subMonitor.split(1));
        // Set the synchronization stamp to know whether the file synchronizer must become active.
        fSynchronizationStamp = fFile.getModificationStamp();
        subMonitor.split(1);
        // TODO commit persistable annotation model
    }
}
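Both buffers configure their CharsetEncoder the same way: malformed input is silently replaced, while unmappable characters abort the encode. That policy is why the catch block can assert that any CharacterCodingException it sees is an UnmappableCharacterException. A small stand-alone sketch of that behavior, using only JDK API (no Eclipse types):

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;

public class EncoderPolicyDemo {
    public static void main(String[] args) {
        CharsetEncoder encoder = Charset.forName("ISO-8859-1").newEncoder();
        // Same policy as the buffers above: replace malformed input,
        // report (throw on) unmappable characters.
        encoder.onMalformedInput(CodingErrorAction.REPLACE);
        encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
        try {
            ByteBuffer ok = encoder.encode(CharBuffer.wrap("plain ascii"));
            System.out.println("encoded " + ok.limit() + " bytes");
            // The snowman character does not exist in Latin-1, so this throws.
            encoder.encode(CharBuffer.wrap("snowman: \u2603"));
        } catch (CharacterCodingException ex) {
            // With REPORT this is an UnmappableCharacterException, which is why
            // the buffers can map it directly to CHARSET_MAPPING_FAILED.
            System.out.println("unmappable: " + ex);
        }
    }
}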
Use of java.io.SequenceInputStream in project pravega by pravega.
The class OperationLogTestBase, method getExpectedContents:
/**
 * Given a list of Log Operations, generates an InputStream for each encountered StreamSegment that contains the final
 * contents of that StreamSegment. Only considers operations of type StreamSegmentAppendOperation and MergeTransactionOperation.
 */
private AbstractMap<Long, InputStream> getExpectedContents(Collection<OperationWithCompletion> operations) {
    HashMap<Long, List<ByteArrayInputStream>> partialContents = new HashMap<>();
    for (OperationWithCompletion o : operations) {
        Assert.assertTrue("Operation is not completed.", o.completion.isDone());
        if (o.completion.isCompletedExceptionally()) {
            // This is a failed operation; ignore it.
            continue;
        }
        if (o.operation instanceof StreamSegmentAppendOperation) {
            StreamSegmentAppendOperation appendOperation = (StreamSegmentAppendOperation) o.operation;
            List<ByteArrayInputStream> segmentContents = partialContents.get(appendOperation.getStreamSegmentId());
            if (segmentContents == null) {
                segmentContents = new ArrayList<>();
                partialContents.put(appendOperation.getStreamSegmentId(), segmentContents);
            }
            segmentContents.add(new ByteArrayInputStream(appendOperation.getData()));
        } else if (o.operation instanceof MergeTransactionOperation) {
            MergeTransactionOperation mergeOperation = (MergeTransactionOperation) o.operation;
            List<ByteArrayInputStream> targetSegmentContents = partialContents.get(mergeOperation.getStreamSegmentId());
            if (targetSegmentContents == null) {
                targetSegmentContents = new ArrayList<>();
                partialContents.put(mergeOperation.getStreamSegmentId(), targetSegmentContents);
            }
            List<ByteArrayInputStream> sourceSegmentContents = partialContents.get(mergeOperation.getTransactionSegmentId());
            targetSegmentContents.addAll(sourceSegmentContents);
            partialContents.remove(mergeOperation.getTransactionSegmentId());
        }
    }
    // Construct the final result.
    HashMap<Long, InputStream> result = new HashMap<>();
    for (Map.Entry<Long, List<ByteArrayInputStream>> e : partialContents.entrySet()) {
        result.put(e.getKey(), new SequenceInputStream(Iterators.asEnumeration(e.getValue().iterator())));
    }
    return result;
}
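The test relies on Guava's Iterators.asEnumeration to adapt each per-segment list of streams to the Enumeration that SequenceInputStream's multi-stream constructor expects. When the streams are already in a Collection, the JDK's own Collections.enumeration does the same job. A sketch (Java 9+; the payload strings are hypothetical):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;

public class ConcatStreamsDemo {
    public static void main(String[] args) throws IOException {
        // Per-append payloads for one segment, in log order.
        List<ByteArrayInputStream> parts = List.of(
                new ByteArrayInputStream("append-1|".getBytes(StandardCharsets.UTF_8)),
                new ByteArrayInputStream("append-2|".getBytes(StandardCharsets.UTF_8)),
                new ByteArrayInputStream("append-3".getBytes(StandardCharsets.UTF_8)));
        // SequenceInputStream consumes the streams one after another, in enumeration order.
        InputStream merged = new SequenceInputStream(Collections.enumeration(parts));
        System.out.println(new String(merged.readAllBytes(), StandardCharsets.UTF_8));
        // prints: append-1|append-2|append-3
    }
}

The concatenated stream yields the appends in log order, which is exactly the "final contents" the helper above promises.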
Use of java.io.SequenceInputStream in project pravega by pravega.
The class StreamSegmentReadIndex, method getMultiReadResultEntry:
/**
 * Returns a ReadResultEntry that matches the specified search parameters.
 * <p>
 * Compared to getSingleMemoryReadResultEntry(), this method may return a direct entry or a collection of entries.
 * If the first entry to be returned would constitute a cache hit, this method attempts to return data from
 * subsequent (congruent) entries as well, as long as they are also cache hits. If a cache miss occurs at any point,
 * the data collected so far is returned as a single entry, excluding the cache-miss entry (except when the first
 * entry is a miss, in which case that entry is returned).
 *
 * @param resultStartOffset The offset within the StreamSegment from which to start returning data.
 * @param maxLength         The maximum number of bytes to return.
 * @return A ReadResultEntry representing the data to return.
 */
private CompletableReadResultEntry getMultiReadResultEntry(long resultStartOffset, int maxLength) {
    int readLength = 0;
    CompletableReadResultEntry nextEntry = getSingleReadResultEntry(resultStartOffset, maxLength);
    if (!(nextEntry instanceof CacheReadResultEntry)) {
        // We can only coalesce CacheReadResultEntries (this also covers the null case).
        return nextEntry;
    }
    // Collect the contents of congruent Index Entries into a list, as long as we still encounter data in the cache.
    ArrayList<InputStream> contents = new ArrayList<>();
    do {
        assert Futures.isSuccessful(nextEntry.getContent()) : "Found CacheReadResultEntry that is not completed yet: " + nextEntry;
        val entryContents = nextEntry.getContent().join();
        contents.add(entryContents.getData());
        readLength += entryContents.getLength();
        if (readLength >= this.config.getMemoryReadMinLength() || readLength >= maxLength) {
            break;
        }
        nextEntry = getSingleMemoryReadResultEntry(resultStartOffset + readLength, maxLength - readLength);
    } while (nextEntry != null);
    // Coalesce the results into a single InputStream and return the result.
    return new CacheReadResultEntry(resultStartOffset, new SequenceInputStream(Iterators.asEnumeration(contents.iterator())), readLength);
}
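The shape of the coalescing loop is easy to lose among the surrounding types. Below is a simplified sketch of the same stopping rules, with plain byte[] chunks standing in for CacheReadResultEntry contents and iterator exhaustion standing in for a cache miss; coalesce, minLength, and maxLength are hypothetical names introduced for illustration:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

public class CoalesceDemo {
    /**
     * Collects consecutive cached chunks until minLength bytes are gathered, maxLength
     * is reached, or the run of cache hits ends -- the same stopping rules as
     * getMultiReadResultEntry above.
     */
    static InputStream coalesce(Iterator<byte[]> cachedChunks, int minLength, int maxLength) {
        List<InputStream> contents = new ArrayList<>();
        int readLength = 0;
        while (cachedChunks.hasNext()) {
            byte[] chunk = cachedChunks.next();
            contents.add(new ByteArrayInputStream(chunk));
            readLength += chunk.length;
            if (readLength >= minLength || readLength >= maxLength) {
                break;
            }
        }
        // One InputStream over all collected chunks, like the coalesced CacheReadResultEntry.
        return new SequenceInputStream(Collections.enumeration(contents));
    }

    public static void main(String[] args) throws IOException {
        List<byte[]> chunks = List.of("aaaa".getBytes(), "bbbb".getBytes(), "cccc".getBytes());
        InputStream merged = coalesce(chunks.iterator(), 6, 100);
        // Stops after "aaaa" + "bbbb" (8 bytes >= minLength 6); "cccc" is left for a later read.
        System.out.println(new String(merged.readAllBytes()));
    }
}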
Use of java.io.SequenceInputStream in project pravega by pravega.
The class StorageTestBase, method testWrite:
/**
 * Tests the write() method.
 *
 * @throws Exception if an unexpected error occurred.
 */
@Test
public void testWrite() throws Exception {
    String segmentName = "foo_write";
    int appendCount = 100;
    try (Storage s = createStorage()) {
        s.initialize(DEFAULT_EPOCH);
        createSegment(segmentName, s);
        // Invalid handle.
        val readOnlyHandle = s.openRead(segmentName).join();
        assertThrows("write() did not throw for read-only handle.",
                () -> s.write(readOnlyHandle, 0, new ByteArrayInputStream("h".getBytes()), 1, TIMEOUT),
                ex -> ex instanceof IllegalArgumentException);
        assertThrows("write() did not throw for handle pointing to inexistent segment.",
                () -> s.write(createInexistentSegmentHandle(s, false), 0, new ByteArrayInputStream("h".getBytes()), 1, TIMEOUT),
                ex -> ex instanceof StreamSegmentNotExistsException);
        val writeHandle = s.openWrite(segmentName).join();
        long offset = 0;
        for (int j = 0; j < appendCount; j++) {
            byte[] writeData = String.format(APPEND_FORMAT, segmentName, j).getBytes();
            // We intentionally add some garbage at the end of the dataStream to verify that write() takes into account
            // the value of the "length" argument.
            val dataStream = new SequenceInputStream(new ByteArrayInputStream(writeData), new ByteArrayInputStream(new byte[100]));
            s.write(writeHandle, offset, dataStream, writeData.length, TIMEOUT).join();
            offset += writeData.length;
        }
        // Check bad offsets.
        final long finalOffset = offset;
        assertThrows("write() did not throw for a bad-offset write (smaller).",
                () -> s.write(writeHandle, finalOffset - 1, new ByteArrayInputStream("h".getBytes()), 1, TIMEOUT),
                ex -> ex instanceof BadOffsetException);
        assertThrows("write() did not throw for a bad-offset write (larger).",
                () -> s.write(writeHandle, finalOffset + 1, new ByteArrayInputStream("h".getBytes()), 1, TIMEOUT),
                ex -> ex instanceof BadOffsetException);
        // Check post-delete write.
        s.delete(writeHandle, TIMEOUT).join();
        assertThrows("write() did not throw for a deleted StreamSegment.",
                () -> s.write(writeHandle, 0, new ByteArrayInputStream(new byte[1]), 1, TIMEOUT),
                ex -> ex instanceof StreamSegmentNotExistsException);
    }
}
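The SequenceInputStream in this test deliberately makes the stream longer than the declared write length, so an implementation that ignores the length argument would persist the 100 trailing zero bytes. A stand-alone sketch of the bounded consumption a correct write() must perform (Java 11+ for readNBytes; the class and variable names are hypothetical):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.nio.charset.StandardCharsets;

public class LengthBoundedWriteDemo {
    public static void main(String[] args) throws IOException {
        byte[] payload = "real data".getBytes(StandardCharsets.UTF_8);
        // Trailing zero bytes simulate the garbage the test appends after the payload.
        InputStream dataStream = new SequenceInputStream(
                new ByteArrayInputStream(payload),
                new ByteArrayInputStream(new byte[100]));
        // A correct write(handle, offset, stream, length, ...) must stop after
        // 'length' bytes; readNBytes models that bounded consumption.
        byte[] written = dataStream.readNBytes(payload.length);
        System.out.println(new String(written, StandardCharsets.UTF_8)); // "real data"
    }
}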