Use of com.intellij.openapi.util.io.ByteSequence in project intellij-community by JetBrains.
Class SnapshotInputMappings, method saveContents.
private void saveContents(int id, BufferExposingByteArrayOutputStream out) throws IOException {
  ByteSequence byteSequence = new ByteSequence(out.getInternalBuffer(), 0, out.size());
  if (SharedIndicesData.ourFileSharedIndicesEnabled) {
    if (SharedIndicesData.DO_CHECKS) {
      synchronized (myContents) {
        myContents.put(id, byteSequence);
        SharedIndicesData.associateContentData(id, myIndexId, byteSequence, ByteSequenceDataExternalizer.INSTANCE);
      }
    } else {
      SharedIndicesData.associateContentData(id, myIndexId, byteSequence, ByteSequenceDataExternalizer.INSTANCE);
    }
  } else {
    myContents.put(id, byteSequence);
  }
}
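For context, a minimal sketch (not from the project) of the zero-copy wrap this method relies on: the ByteSequence only references the stream's internal buffer, so no array copy is made. The package of BufferExposingByteArrayOutputStream is assumed to match ByteSequence's, and only methods already visible above (getInternalBuffer, size) are used.

  import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream; // assumed package
  import com.intellij.openapi.util.io.ByteSequence;

  class ContentWrapSketch {
    // The internal buffer is usually larger than the written data, so the length must come from size().
    static ByteSequence wrapWithoutCopy(BufferExposingByteArrayOutputStream out) {
      return new ByteSequence(out.getInternalBuffer(), 0, out.size());
    }
  }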
Use of com.intellij.openapi.util.io.ByteSequence in project intellij-community by JetBrains.
Class SnapshotInputMappings, method readContents.
private ByteSequence readContents(Integer hashId) throws IOException {
  if (SharedIndicesData.ourFileSharedIndicesEnabled) {
    if (SharedIndicesData.DO_CHECKS) {
      synchronized (myContents) {
        ByteSequence contentBytes = SharedIndicesData.recallContentData(hashId, myIndexId, ByteSequenceDataExternalizer.INSTANCE);
        ByteSequence contentBytesFromContents = myContents.get(hashId);
        if ((contentBytes == null && contentBytesFromContents != null) || !Comparing.equal(contentBytesFromContents, contentBytes)) {
          SharedIndicesData.associateContentData(hashId, myIndexId, contentBytesFromContents, ByteSequenceDataExternalizer.INSTANCE);
          if (contentBytes != null) {
            LOG.error("Unexpected indexing diff with hashid " + myIndexId + "," + hashId);
          }
          contentBytes = contentBytesFromContents;
        }
        return contentBytes;
      }
    } else {
      return SharedIndicesData.recallContentData(hashId, myIndexId, ByteSequenceDataExternalizer.INSTANCE);
    }
  }
  return myContents.get(hashId);
}
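A hedged helper, not part of the project, showing how a caller might copy the returned ByteSequence into a standalone byte[]: the sequence may be a window into a larger shared buffer, so its offset and length must be honored. Only the accessors visible in appendBytes further down this page are used.

  import com.intellij.openapi.util.io.ByteSequence;
  import java.util.Arrays;

  class ContentReadSketch {
    // Materializes the sequence into its own array, respecting the window described by offset/length.
    static byte[] toArray(ByteSequence sequence) {
      int from = sequence.getOffset();
      return Arrays.copyOfRange(sequence.getBytes(), from, from + sequence.getLength());
    }
  }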
Use of com.intellij.openapi.util.io.ByteSequence in project intellij-community by JetBrains.
Class StorageTest, method testStress.
public void testStress() throws Exception {
  StringBuffer data = new StringBuffer();
  for (int i = 0; i < 100; i++) {
    data.append("Hello ");
  }
  String hello = data.toString();
  long start = System.currentTimeMillis();
  final int count = 100000;
  int[] records = new int[count];
  for (int i = 0; i < count; i++) {
    final int record = myStorage.createNewRecord();
    // the fixed-size optimization is more than 50 percent here!
    myStorage.writeBytes(record, new ByteSequence(hello.getBytes()), true);
    records[i] = record;
  }
  for (int record : records) {
    assertEquals(hello, new String(myStorage.readBytes(record)));
  }
  long timedelta = System.currentTimeMillis() - start;
  System.out.println("Done for " + timedelta + "msec.");
}
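As a variation, a hedged sketch of the same round trip with an explicit charset; the test above relies on the platform default encoding in both getBytes() and new String(...). The storage calls (createNewRecord, writeBytes, readBytes) are used exactly as in the test, and this snippet is assumed to run inside the same test class.

  import java.nio.charset.StandardCharsets;

  // Inside the test, after myStorage is initialized:
  byte[] payload = hello.getBytes(StandardCharsets.UTF_8);
  int record = myStorage.createNewRecord();
  myStorage.writeBytes(record, new ByteSequence(payload), true); // same flag as in the loop above
  assertEquals(hello, new String(myStorage.readBytes(record), StandardCharsets.UTF_8));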
Use of com.intellij.openapi.util.io.ByteSequence in project intellij-community by JetBrains.
Class FileTypeManagerImpl, method processFirstBytes.
private boolean processFirstBytes(@NotNull final InputStream stream, final int length, @NotNull Processor<ByteSequence> processor) throws IOException {
  final byte[] bytes = FileUtilRt.getThreadLocalBuffer();
  assert bytes.length >= length : "Cannot process more than " + bytes.length + " in one call, requested:" + length;
  int n = stream.read(bytes, 0, length);
  if (n <= 0) {
    // repeat inside read action to guarantee all writes are finished
    if (toLog()) {
      log("F: processFirstBytes(): inputStream.read() returned " + n + "; retrying with read action. stream=" + streamInfo(stream));
    }
    n = ApplicationManager.getApplication().runReadAction((ThrowableComputable<Integer, IOException>)() -> stream.read(bytes, 0, length));
    if (toLog()) {
      log("F: processFirstBytes(): under read action inputStream.read() returned " + n + "; stream=" + streamInfo(stream));
    }
    if (n <= 0) {
      return false;
    }
  }
  return processor.process(new ByteSequence(bytes, 0, n));
}
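A hedged example (not from the project) of a Processor a caller could hand to this method: it checks whether the first bytes start with an XML declaration and returns the result of that check. Processor is assumed to be com.intellij.util.Processor, and only ByteSequence accessors shown elsewhere on this page are used.

  import com.intellij.openapi.util.io.ByteSequence;
  import com.intellij.util.Processor;
  import java.nio.charset.StandardCharsets;

  class FirstBytesProcessorSketch {
    // Returns true when the supplied bytes start with an XML declaration.
    static final Processor<ByteSequence> LOOKS_LIKE_XML = sequence -> {
      int length = Math.min(sequence.getLength(), 5);
      String prefix = new String(sequence.getBytes(), sequence.getOffset(), length, StandardCharsets.US_ASCII);
      return prefix.startsWith("<?xml");
    };
  }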
Use of com.intellij.openapi.util.io.ByteSequence in project intellij-community by JetBrains.
Class AbstractStorage, method appendBytes.
protected void appendBytes(int record, ByteSequence bytes) throws IOException {
  final int delta = bytes.getLength();
  if (delta == 0) return;
  synchronized (myLock) {
    int capacity = myRecordsTable.getCapacity(record);
    int oldSize = myRecordsTable.getSize(record);
    int newSize = oldSize + delta;
    if (newSize > capacity) {
      if (oldSize > 0) {
        final byte[] newbytes = new byte[newSize];
        System.arraycopy(readBytes(record), 0, newbytes, 0, oldSize);
        System.arraycopy(bytes.getBytes(), bytes.getOffset(), newbytes, oldSize, delta);
        writeBytes(record, new ByteSequence(newbytes), false);
      } else {
        writeBytes(record, bytes, false);
      }
    } else {
      long address = myRecordsTable.getAddress(record) + oldSize;
      myDataTable.writeBytes(address, bytes.getBytes(), bytes.getOffset(), bytes.getLength());
      myRecordsTable.setSize(record, newSize);
    }
  }
}
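A hedged usage sketch, assumed to run inside an AbstractStorage subclass since appendBytes is protected: because ByteSequence carries its own offset and length, only the selected window of the source buffer is appended. The record id and the buffer-producing helper are hypothetical.

  // Hypothetical data: append bytes 16..143 of a larger buffer to an existing record.
  byte[] buffer = produceChunk();      // hypothetical helper supplying the source array
  int record = someExistingRecord;     // hypothetical record id obtained earlier
  appendBytes(record, new ByteSequence(buffer, 16, 128));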