Use of com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream in project intellij-community by JetBrains.
Class StubUpdatingIndex, method getIndexer():
@NotNull
@Override
public DataIndexer<Integer, SerializedStubTree, FileContent> getIndexer() {
  return new DataIndexer<Integer, SerializedStubTree, FileContent>() {
    @Override
    @NotNull
    public Map<Integer, SerializedStubTree> map(@NotNull final FileContent inputData) {
      final Map<Integer, SerializedStubTree> result = new THashMap<Integer, SerializedStubTree>() {
        StubUpdatingIndexKeys myKeySet;

        @Override
        public Set<Integer> keySet() {
          if (myKeySet == null) {
            myKeySet = new StubUpdatingIndexKeys(super.keySet());
          }
          return myKeySet;
        }
      };

      ApplicationManager.getApplication().runReadAction(() -> {
        final Stub rootStub = StubTreeBuilder.buildStubTree(inputData);
        if (rootStub == null) return;

        VirtualFile file = inputData.getFile();
        // Binary files have no PSI text, so the content length is recorded as -1.
        int contentLength;
        if (file.getFileType().isBinary()) {
          contentLength = -1;
        }
        else {
          contentLength = ((FileContentImpl)inputData).getPsiFileForPsiDependentIndex().getTextLength();
        }
        rememberIndexingStamp(file, contentLength);

        final BufferExposingByteArrayOutputStream bytes = new BufferExposingByteArrayOutputStream();
        SerializationManagerEx.getInstanceEx().serialize(rootStub, bytes);

        if (DebugAssertions.DEBUG) {
          // In debug mode, verify that the freshly serialized tree deserializes back to an equivalent stub.
          try {
            Stub deserialized =
              SerializationManagerEx.getInstanceEx().deserialize(new ByteArrayInputStream(bytes.getInternalBuffer(), 0, bytes.size()));
            check(deserialized, rootStub);
          }
          catch (ProcessCanceledException pce) {
            throw pce;
          }
          catch (Throwable t) {
            LOG.error("Error indexing:" + file, t);
          }
        }

        final int key = Math.abs(FileBasedIndex.getFileId(file));
        SerializedStubTree serializedStubTree =
          new SerializedStubTree(bytes.getInternalBuffer(), bytes.size(), rootStub, file.getLength(), contentLength);
        result.put(key, serializedStubTree);
        try {
          ((StubUpdatingIndexKeys)result.keySet()).myStubIndicesValueMap = calcStubIndicesValueMap(serializedStubTree, key);
        }
        catch (StorageException ex) {
          throw new RuntimeException(ex);
        }
      });
      return result;
    }
  };
}
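For reference, a minimal sketch of the pattern this indexer relies on: write into a BufferExposingByteArrayOutputStream, then read straight from getInternalBuffer()/size() without copying the backing array. DataOutputStream.writeUTF stands in for SerializationManagerEx here, and RoundTripSketch is an illustrative name, not IntelliJ code.

import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class RoundTripSketch {
  // Writes a payload and immediately deserializes it from the exposed buffer,
  // mirroring the DebugAssertions.DEBUG check above.
  static String writeAndReadBack(String payload) throws IOException {
    BufferExposingByteArrayOutputStream bytes = new BufferExposingByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeUTF(payload); // stand-in for SerializationManagerEx.serialize(rootStub, bytes)
    out.close();

    // getInternalBuffer() exposes the backing array without the copy that
    // toByteArray() would make; size() bounds the valid region of that array.
    DataInputStream in = new DataInputStream(
      new ByteArrayInputStream(bytes.getInternalBuffer(), 0, bytes.size()));
    return in.readUTF();
  }
}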
Use of com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream in project intellij-community by JetBrains.
Class RefCountingStorage, method zipAndWrite():
private void zipAndWrite(ByteSequence bytes, int record, boolean fixedSize) throws IOException {
  BufferExposingByteArrayOutputStream s = new BufferExposingByteArrayOutputStream();
  DeflaterOutputStream out = new DeflaterOutputStream(s);
  try {
    out.write(bytes.getBytes(), bytes.getOffset(), bytes.getLength());
  }
  finally {
    out.close();
  }
  synchronized (myLock) {
    doWrite(record, fixedSize, s);
    myPendingWriteRequestsSize -= bytes.getLength();
    myPendingWriteRequests.remove(record);
  }
}
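The same deflate-into-exposed-buffer idea in isolation, as a hedged sketch: ZipSketch is an illustrative name, the round trip uses only JDK streams plus the getInternalBuffer()/size() accessors shown above, and RefCountingStorage's doWrite/pending-request bookkeeping is deliberately left out.

import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

class ZipSketch {
  // Compresses the given range; the caller can hand getInternalBuffer()/size()
  // to storage code without an extra copy.
  static BufferExposingByteArrayOutputStream zip(byte[] bytes, int offset, int length) throws IOException {
    BufferExposingByteArrayOutputStream s = new BufferExposingByteArrayOutputStream();
    DeflaterOutputStream out = new DeflaterOutputStream(s);
    try {
      out.write(bytes, offset, length);
    }
    finally {
      out.close(); // finishes the deflater and flushes the trailing block
    }
    return s;
  }

  // Round-trip check: inflate straight from the exposed buffer.
  static byte[] unzip(BufferExposingByteArrayOutputStream s) throws IOException {
    InflaterInputStream in = new InflaterInputStream(
      new ByteArrayInputStream(s.getInternalBuffer(), 0, s.size()));
    return in.readAllBytes();
  }
}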
Use of com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream in project intellij-community by JetBrains.
Class CompressedAppendableFile, method saveNextChunkIfNeeded():
private void saveNextChunkIfNeeded() throws IOException {
  if (myBufferPosition == myNextChunkBuffer.length) {
    BufferExposingByteArrayOutputStream compressedOut = new BufferExposingByteArrayOutputStream();
    DataOutputStream compressedDataOut = new DataOutputStream(compressedOut);
    compress(compressedDataOut, myNextChunkBuffer);
    compressedDataOut.close();

    // we need to be in short range for chunk length table
    assert compressedDataOut.size() <= MAX_PAGE_LENGTH;

    saveChunk(compressedOut, myFileLength);
    myBufferPosition = 0;
    initChunkLengthTable();
    myFileLength += compressedOut.size();
    if (doDebug) myCompressedChunksFileOffsets.add(myFileLength);

    // Grow the per-chunk length table when it is full.
    if (myChunkLengthTable.length == myChunkTableLength) {
      myChunkLengthTable = reallocShortTable(myChunkLengthTable);
    }
    myChunkLengthTable[myChunkTableLength++] = (short)compressedOut.size();

    // Record an absolute file offset every FACTOR chunks so a chunk's position
    // can be located without summing the whole length table.
    if (myChunkTableLength / FACTOR > myChunkOffsetTable.length) {
      long[] newChunkOffsetTable = new long[myChunkOffsetTable.length + 1];
      System.arraycopy(myChunkOffsetTable, 0, newChunkOffsetTable, 0, myChunkOffsetTable.length);
      newChunkOffsetTable[myChunkOffsetTable.length] = myFileLength;
      myChunkOffsetTable = newChunkOffsetTable;
    }

    // Keep the just-written chunk, in uncompressed form, in the decompressed-chunk cache.
    byte[] bytes = new byte[myAppendBufferLength];
    System.arraycopy(myNextChunkBuffer, 0, bytes, 0, myAppendBufferLength);
    ourDecompressedCache.put(this, myChunkTableLength - 1, bytes);
  }
}
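The chunk bookkeeping above grows a short[] length table on demand via reallocShortTable; that helper is private to CompressedAppendableFile, so the sketch below is only an assumption about what such a growth step might look like, not its actual implementation.

import java.util.Arrays;

class ChunkTableSketch {
  // Hypothetical stand-in for reallocShortTable: grow the table geometrically
  // so that repeated appends stay amortized O(1).
  static short[] grow(short[] table) {
    int newLength = Math.max(4, table.length + (table.length >> 1)); // ~1.5x growth
    return Arrays.copyOf(table, newLength);
  }
}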
Use of com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream in project intellij-community by JetBrains.
Class MapReduceIndex, method checkValuesHaveProperEqualsAndHashCode():
public static <Key, Value> void checkValuesHaveProperEqualsAndHashCode(@NotNull Map<Key, Value> data,
                                                                       @NotNull ID<Key, Value> indexId,
                                                                       @NotNull DataExternalizer<Value> valueExternalizer) {
  if (DebugAssertions.DEBUG) {
    for (Map.Entry<Key, Value> e : data.entrySet()) {
      final Value value = e.getValue();
      if (!(Comparing.equal(value, value) && (value == null || value.hashCode() == value.hashCode()))) {
        LOG.error("Index " + indexId + " violates equals / hashCode contract for Value parameter");
      }
      try {
        final BufferExposingByteArrayOutputStream out = new BufferExposingByteArrayOutputStream();
        DataOutputStream outputStream = new DataOutputStream(out);
        valueExternalizer.save(outputStream, value);
        outputStream.close();
        final Value deserializedValue =
          valueExternalizer.read(new DataInputStream(new UnsyncByteArrayInputStream(out.getInternalBuffer(), 0, out.size())));
        if (!(Comparing.equal(value, deserializedValue) && (value == null || value.hashCode() == deserializedValue.hashCode()))) {
          LOG.error("Index " + indexId + " deserialization violates equals / hashCode contract for Value parameter");
        }
      }
      catch (IOException ex) {
        LOG.error(ex);
      }
    }
  }
}
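The same round-trip discipline can be exercised against a trivial externalizer; the sketch below assumes only DataExternalizer's save(DataOutput, T)/read(DataInput) methods used above, and StringExternalizer/roundTripsCleanly are illustrative names.

import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;
import com.intellij.util.io.DataExternalizer;

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

class StringExternalizer implements DataExternalizer<String> {
  @Override
  public void save(DataOutput out, String value) throws IOException {
    out.writeUTF(value);
  }

  @Override
  public String read(DataInput in) throws IOException {
    return in.readUTF();
  }

  // Mirrors checkValuesHaveProperEqualsAndHashCode: a value must equal its own
  // serialized-then-deserialized copy, hash codes included.
  static boolean roundTripsCleanly(DataExternalizer<String> externalizer, String value) throws IOException {
    BufferExposingByteArrayOutputStream out = new BufferExposingByteArrayOutputStream();
    DataOutputStream dataOut = new DataOutputStream(out);
    externalizer.save(dataOut, value);
    dataOut.close();
    String copy = externalizer.read(new DataInputStream(
      new ByteArrayInputStream(out.getInternalBuffer(), 0, out.size())));
    return value.equals(copy) && value.hashCode() == copy.hashCode();
  }
}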
Use of com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream in project intellij-community by JetBrains.
Class AppendableStorageBackedByResizableMappedFile, method append():
public <Data> int append(Data value, KeyDescriptor<Data> descriptor) throws IOException {
  final BufferExposingByteArrayOutputStream bos = new BufferExposingByteArrayOutputStream();
  DataOutput out = new DataOutputStream(bos);
  descriptor.save(out, value);
  final int size = bos.size();
  final byte[] buffer = bos.getInternalBuffer();
  int currentLength = getCurrentLength();

  if (myCompressedAppendableFile != null) {
    //myCompressedAppendableFile.append(value, descriptor);
    myCompressedAppendableFile.append(buffer, size);
    if (!testMode) return currentLength;
  }

  if (size > ourAppendBufferLength) {
    // The record does not fit into the append buffer: write it through directly.
    flushKeyStoreBuffer();
    put(currentLength, buffer, 0, size);
    myFileLength += size;
  }
  else {
    if (size > ourAppendBufferLength - myBufferPosition) {
      flushKeyStoreBuffer();
    }
    // myAppendBuffer will contain complete records
    if (myAppendBuffer == null) {
      myAppendBuffer = new byte[ourAppendBufferLength];
    }
    System.arraycopy(buffer, 0, myAppendBuffer, myBufferPosition, size);
    myBufferPosition += size;
  }
  return currentLength;
}
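A stripped-down sketch of the small-record buffering in the else branch: records that fit are copied into an in-memory append buffer, which is flushed whenever the next record would overflow it. AppendBufferSketch and FlushTarget are illustrative names; the compressed-file path and the mapped-file write-through above are not reproduced.

class AppendBufferSketch {
  interface FlushTarget {
    void write(byte[] data, int length); // persists the buffered bytes (illustrative)
  }

  private final byte[] buffer;
  private int position;
  private final FlushTarget target;

  AppendBufferSketch(int capacity, FlushTarget target) {
    this.buffer = new byte[capacity];
    this.target = target;
  }

  // Mirrors the else branch of append(): flush first if the record would not fit,
  // so the buffer always holds complete records.
  void append(byte[] record, int size) {
    if (size > buffer.length) {
      flush();                      // oversized records bypass the buffer
      target.write(record, size);
      return;
    }
    if (size > buffer.length - position) {
      flush();
    }
    System.arraycopy(record, 0, buffer, position, size);
    position += size;
  }

  void flush() {
    if (position > 0) {
      target.write(buffer, position);
      position = 0;
    }
  }
}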