Usage of com.orientechnologies.orient.core.record.impl.ORecordBytes in the orientdb project (by orientechnologies):
class CRUDObjectPhysicalTest, method testObjectDelete.
@Test(dependsOnMethods = "testAddingORecordBytesAfterParentCreation")
public void testObjectDelete() {
  // Persist a Media object carrying a small binary blob, verify that the
  // payload round-trips through the database, then delete the record.
  final Media media = new Media();
  final OBlob blob = new ORecordBytes("This is a test".getBytes());
  media.setContent(blob);

  final Media saved = database.save(media);
  Assert.assertEquals(new String(saved.getContent().toStream()), "This is a test");

  // Deleting the parent object must succeed.
  database.delete(saved);
}
Usage of com.orientechnologies.orient.core.record.impl.ORecordBytes in the orientdb project (by orientechnologies):
class BinaryTest, method testMixedCreateExternal.
@Test(dependsOnMethods = "testBasicReadExternal")
public void testMixedCreateExternal() {
  // Store a document whose "binary" field links to a raw byte record, and
  // keep its identity around for the dependent read test.
  final byte[] payload = "Binary data".getBytes();
  final ODocument doc = new ODocument();
  doc.field("binary", new ORecordBytes(database, payload));
  doc.save();
  rid = doc.getIdentity();
}
Usage of com.orientechnologies.orient.core.record.impl.ORecordBytes in the orientdb project (by orientechnologies):
class LocalDocumentAndBinarySpeedTest, method saveLotOfMixedData.
@Test
public void saveLotOfMixedData() {
  database.open(DEFAULT_DB_USER, DEFAULT_DB_PASSWORD);
  try {
    // Schema setup now lives inside the try block: in the original it ran
    // before it, so a failure in createClass/createProperty/createIndex
    // would leave the just-opened database unclosed.
    final OClass chunk = database.getMetadata().getSchema().createClass("Chunk");
    index = chunk.createProperty("hash", OType.STRING).createIndex(OClass.INDEX_TYPE.UNIQUE);
    chunk.createProperty("binary", OType.LINK);

    // Deterministic payload shared by every chunk record.
    final byte[] data = new byte[size];
    for (int i = 0; i < size; i++) {
      data[i] = (byte) (i % 255);
    }

    // Reuse a single ODocument instance across iterations to cut garbage.
    final ODocument doc = new ODocument("Chunk");
    for (int i = 0; i < count; i++) {
      doc.reset();
      doc.setClassName("Chunk"); // reset() clears the class name, so restore it
      doc.field("hash", "key" + i);
      doc.field("binary", new ORecordBytes(database, data));
      doc.save();
      final ORID rid = doc.getIdentity();
      if (i % 100 == 0) {
        System.out.println("ORID=" + rid);
      }
    }
  } finally {
    database.close();
  }
}
Usage of com.orientechnologies.orient.core.record.impl.ORecordBytes in the orientdb project (by orientechnologies):
class GiantFileTest, method storeFileData.
/**
 * Streams the contents of {@code file} into the database as a sequence of
 * fixed-size binary chunk records and stores the resulting list of record IDs
 * in the "DataChunks" field of {@code fileDoc}.
 *
 * <p>Fix over the original: {@code fullChunks * CHUNK_SIZE} was computed in
 * 32-bit int arithmetic before being widened to long, which overflows for
 * files of roughly 2 GiB or more — exactly the files this test targets.
 * The multiplication (and the progress-percent computation) now use long
 * arithmetic.
 *
 * @param fileDoc document to attach the chunk RID list to; must not already
 *                contain a "DataChunks" field (overwrite is refused).
 * @param file    the file whose bytes are stored.
 * @throws Exception if the file changes size while being read, or on I/O failure.
 */
private static void storeFileData(final ODocument fileDoc, final File file) throws Exception {
  // To avoid overwriting a stored file, DataChunks must be null.
  final List<ORID> existingChunks = fileDoc.field("DataChunks");
  if (existingChunks != null) {
    final String fileName = fileDoc.field("FileName");
    throw new RuntimeException("File record already has data; overwrite not allowed! fileName: " + fileName);
  }
  // TODO: is this assumption ok?
  // Get the currently open database for this thread and set intent.
  final ODatabase database = ODatabaseRecordThreadLocal.INSTANCE.get();
  database.declareIntent(new OIntentMassiveInsert());
  // Insert File data.
  final long fileSize = file.length();
  final FileInputStream in = new FileInputStream(file);
  try {
    final int CHUNK_SIZE = 81920;
    int bufferedBytes;
    final byte[] buffer = new byte[CHUNK_SIZE];
    byte currentPercent = 0;
    final int fullChunks = (int) (fileSize / CHUNK_SIZE);
    // Widen BEFORE multiplying: the original int multiplication overflowed
    // for files >= ~2 GiB, corrupting the partial-chunk bookkeeping below.
    final long fullChunksSize = (long) fullChunks * CHUNK_SIZE;
    final int totalChunks;
    if (fileSize > fullChunksSize) {
      totalChunks = fullChunks + 1;
    } else {
      totalChunks = fullChunks;
    }
    final List<ORID> chunkRids = new ArrayList<ORID>(totalChunks);
    // Make only one ORecordBytes instance and reuse it for every chunk,
    // to reduce heap garbage.
    final ORecordBytes chunk = new ORecordBytes();
    // Handle the full chunks.
    for (int page = 0; page < fullChunks; page++) {
      // Read a full chunk of data from the file into the buffer; read() may
      // return fewer bytes than requested, so loop until the buffer is full.
      bufferedBytes = 0;
      while (bufferedBytes < buffer.length) {
        final int bytesRead = in.read(buffer, bufferedBytes, buffer.length - bufferedBytes);
        if (bytesRead == -1) {
          throw new Exception("Reached end of file prematurely. (File changed while reading?) fileName=" + file.getAbsolutePath());
        }
        bufferedBytes += bytesRead;
      }
      // Save the chunk to the database.
      final long saveStartTime = System.currentTimeMillis();
      chunk.reset(buffer);
      chunk.save();
      final long saveMs = System.currentTimeMillis() - saveStartTime;
      // Log the amount of time taken by the save.
      System.out.printf("Saved chunk %d in %d ms.\n", page, saveMs);
      // Save the chunk's record ID in the list.
      // Have to copy() the ORID or else every chunk in the list gets the same last ORID.
      // This is because we are using the chunk.reset(); approach to reduce garbage objects.
      chunkRids.add(chunk.getIdentity().copy());
      // Only report progress if it has changed. Long arithmetic so the
      // intermediate (page + 1) * 100 cannot overflow for huge files.
      final byte percent = (byte) ((page + 1) * 100L / totalChunks);
      if (percent > currentPercent) {
        System.out.printf("Progress: %d%%\n", percent);
        currentPercent = percent;
      }
    }
    // Handle the final partial chunk (if any).
    if (fullChunks < totalChunks) {
      final int remainder = (int) (fileSize - fullChunksSize);
      // Read the remaining data from the file into the buffer.
      bufferedBytes = 0;
      while (bufferedBytes < remainder) {
        final int bytesRead = in.read(buffer, bufferedBytes, remainder - bufferedBytes);
        if (bytesRead == -1) {
          throw new Exception("Reached end of file prematurely. (File changed while reading?) fileName=" + file.getAbsolutePath());
        }
        bufferedBytes += bytesRead;
      }
      // Save the chunk to the database.
      final long saveStartTime = System.currentTimeMillis();
      chunk.reset(Arrays.copyOf(buffer, remainder));
      chunk.save();
      final long saveMs = System.currentTimeMillis() - saveStartTime;
      // Log the amount of time taken by the save.
      System.out.printf("Saved partial chunk %d in %d ms.\n", fullChunks, saveMs);
      // Save the chunk's record ID in the list. No copy() needed: this is
      // the last use of the reused chunk record.
      chunkRids.add(chunk.getIdentity());
    }
    // Should be no more data, so validate this.
    final int b = in.read();
    if (b != -1) {
      throw new Exception("File changed while saving to database! fileName=" + file.getAbsolutePath());
    }
    // Report 100% progress if we haven't already.
    if (currentPercent < 100) {
      System.out.println("Progress: 100%");
    }
    // Save the list of chunk references.
    final long saveChunkListStartTime = System.currentTimeMillis();
    fileDoc.field("DataChunks", chunkRids);
    fileDoc.save();
    final long saveChunkListMs = System.currentTimeMillis() - saveChunkListStartTime;
    // Log the amount of time taken to save the list of chunk RIDs.
    System.out.printf("Saved list of %d chunk RIDs in %d ms.\n", chunkRids.size(), saveChunkListMs);
  } finally {
    database.declareIntent(null);
    in.close();
  }
}
Usage of com.orientechnologies.orient.core.record.impl.ORecordBytes in the orientdb project (by orientechnologies):
class TransactionOptimisticTest, method testTransactionOptimisticCommit.
@Test(dependsOnMethods = "testTransactionOptimisticRollback")
public void testTransactionOptimisticCommit() throws IOException {
  // Make sure the blob cluster exists before counting its records.
  if (database.getClusterIdByName("binary") == -1) {
    database.addBlobCluster("binary");
  }
  final long before = database.countClusterElements("binary");

  // Save a blob inside a transaction and commit it.
  database.begin();
  final OBlob blob = new ORecordBytes("This is the first version".getBytes());
  blob.save("binary");
  database.commit();

  // The committed record must now be visible in the cluster count.
  Assert.assertEquals(database.countClusterElements("binary"), before + 1);
}
Aggregations