Usage example of org.syncany.chunk.Chunk in the syncany project:
class AssemblerTest, method testAssembler.
/**
 * Alters multichunk data to test whether integrity checks work.
 */
@Test
public void testAssembler() throws Exception {
	LocalTransferSettings connectionSettings = (LocalTransferSettings) TestConfigUtil.createTestLocalConnection();

	TestClient uploader = new TestClient("A", connectionSettings);
	TestClient downloader = new TestClient("B", connectionSettings);

	// Small file: exactly one chunk, packed into exactly one multichunk
	uploader.createNewFile("file1.jpg", 20);
	uploader.upWithForceChecksum();

	// Read the single chunk back from the original multichunk in the repo
	File repoMultiChunkDir = new File(connectionSettings.getPath(), "multichunks");
	File originalMultiChunkFile = repoMultiChunkDir.listFiles()[0];

	MultiChunk originalMultiChunk = uploader.getConfig().getMultiChunker()
			.createMultiChunk(uploader.getConfig().getTransformer().createInputStream(new FileInputStream(originalMultiChunkFile)));

	Chunk singleChunk = originalMultiChunk.read();
	originalMultiChunk.close();

	// Flip one byte in the chunk and write a tampered multichunk next to the original
	File tamperedMultiChunkFile = new File(originalMultiChunkFile + "-altered");
	MultiChunk tamperedMultiChunk = uploader.getConfig().getMultiChunker()
			.createMultiChunk(originalMultiChunk.getId(),
					uploader.getConfig().getTransformer().createOutputStream(new FileOutputStream(tamperedMultiChunkFile)));

	singleChunk.getContent()[0] ^= 0x01; // flip a single bit

	tamperedMultiChunk.write(singleChunk);
	tamperedMultiChunk.close();

	// Swap the original multichunk for the tampered one
	originalMultiChunkFile.delete();
	FileUtils.moveFile(tamperedMultiChunkFile, originalMultiChunkFile);

	boolean integrityViolationDetected = false;

	try {
		// Downloading the tampered multichunk must fail the integrity check
		downloader.down();
	}
	catch (Exception e) {
		integrityViolationDetected = true;
	}

	assertTrue(integrityViolationDetected);

	uploader.deleteTestData();
	downloader.deleteTestData();
}
Usage example of org.syncany.chunk.Chunk in the syncany project:
class FixedOffsetChunkerTest, method testCreateChunksFrom5MBFileAndTestChunkSize.
/**
 * Chunks a 5 MB random file with a fixed-size chunker and verifies that
 * every chunk has the exact configured size, that the expected number of
 * chunks is produced, and that reassembling the chunks reproduces the
 * input file byte-for-byte (checksum comparison).
 */
@Test
public void testCreateChunksFrom5MBFileAndTestChunkSize() throws Exception {
	// Test Constants
	final int TOTAL_FILE_SIZE = 5 * 1024 * 1024;
	final int EXACT_CHUNK_SIZE = 512 * 1024;
	// 5 MB divides evenly into 512 KB chunks, so every chunk is full-size
	final int EXPECTED_NUMBER_OF_CHUNKS = TOTAL_FILE_SIZE / EXACT_CHUNK_SIZE;
	final int EXPECTED_CHUNK_SIZE = EXACT_CHUNK_SIZE;

	// Setup
	File inputRandom5MBFile = TestFileUtil.createRandomFileInDirectory(tempDir, TOTAL_FILE_SIZE);
	File outputCopyOfRandom5MBFile = TestFileUtil.getRandomFilenameInDirectory(tempDir);

	Chunker chunker = new FixedChunker(EXACT_CHUNK_SIZE, FixedChunker.DEFAULT_DIGEST_ALG);

	// Create chunks; try-with-resources ensures the output stream is closed
	// even when one of the in-loop assertions fails
	int actualChunkCount = 0;
	Chunk lastChunk = null;

	try (FileOutputStream outputCopyOfRandom5MBFileOutputStream = new FileOutputStream(outputCopyOfRandom5MBFile)) {
		Enumeration<Chunk> chunkEnumeration = chunker.createChunks(inputRandom5MBFile);

		while (chunkEnumeration.hasMoreElements()) {
			actualChunkCount++;
			lastChunk = chunkEnumeration.nextElement();

			// Chunk size & checksum
			assertEquals("Chunk does not have the expected size.", EXPECTED_CHUNK_SIZE, lastChunk.getSize());
			assertNotNull("Chunk checksum should not be null.", lastChunk.getChecksum());

			outputCopyOfRandom5MBFileOutputStream.write(lastChunk.getContent());
		}
	}

	// Number of chunks; also guards the lastChunk dereference below
	assertEquals("Unexpected number of chunks when chunking", EXPECTED_NUMBER_OF_CHUNKS, actualChunkCount);
	assertNotNull("At least one chunk expected.", lastChunk);

	// Checksums: reassembled copy must be byte-identical to the input
	byte[] inputFileChecksum = FileUtil.createChecksum(inputRandom5MBFile, FixedChunker.DEFAULT_DIGEST_ALG);
	byte[] outputFileChecksum = FileUtil.createChecksum(outputCopyOfRandom5MBFile, FixedChunker.DEFAULT_DIGEST_ALG);

	assertArrayEquals("Checksums of input and output file do not match.", inputFileChecksum, outputFileChecksum);
	assertArrayEquals("Last chunk's getFileChecksum() should be the file checksum.", inputFileChecksum, lastChunk.getFileChecksum());
}
Usage example of org.syncany.chunk.Chunk in the syncany project:
class FrameworkCombinationTest, method deduplicateAndCreateChunkIndex.
/**
 * Deduplicates the given input files with the given framework combination
 * (chunker / multichunker / transformer) and records the results in a
 * {@link ChunkIndex}: chunk checksum -> multichunk ID, input file -> list
 * of chunk checksums, and the set of multichunk files written.
 *
 * @param inputFiles files to deduplicate
 * @param combination chunker/multichunker/transformer combination under test
 * @return the populated chunk index
 * @throws IOException if reading input files or writing multichunks fails
 */
private ChunkIndex deduplicateAndCreateChunkIndex(final List<File> inputFiles, FrameworkCombination combination) throws IOException {
	logger.log(Level.INFO, "- Deduplicate and create chunk index ...");

	final ChunkIndex chunkIndex = new ChunkIndex();
	Deduper deduper = new Deduper(combination.chunker, combination.multiChunker, combination.transformer, Long.MAX_VALUE, Long.MAX_VALUE);

	deduper.deduplicate(inputFiles, new DeduperListener() {
		@Override
		public void onMultiChunkWrite(MultiChunk multiChunk, Chunk chunk) {
			logger.log(Level.INFO, "  - Adding chunk " + StringUtil.toHex(chunk.getChecksum()) + " to multichunk " + multiChunk.getId() + " ...");
			chunkIndex.chunkIDToMultiChunkID.put(new ChunkChecksum(chunk.getChecksum()), multiChunk.getId());
		}

		@Override
		public void onFileAddChunk(File file, Chunk chunk) {
			logger.log(Level.INFO, "  - Adding chunk " + StringUtil.toHex(chunk.getChecksum()) + " to inputFileToChunkIDs-map for file " + file + " ...");

			// Create and register the per-file list only once; subsequent
			// chunks for the same file reuse the stored list (the original
			// re-put the list on every call, which was redundant)
			List<ChunkChecksum> chunkIDsForFile = chunkIndex.inputFileToChunkIDs.get(file);

			if (chunkIDsForFile == null) {
				chunkIDsForFile = new ArrayList<ChunkChecksum>();
				chunkIndex.inputFileToChunkIDs.put(file, chunkIDsForFile);
			}

			chunkIDsForFile.add(new ChunkChecksum(chunk.getChecksum()));
		}

		@Override
		public boolean onChunk(Chunk chunk) {
			// Only accept chunks whose checksum has not been seen yet
			if (chunkIndex.chunkIDToMultiChunkID.containsKey(new ChunkChecksum(chunk.getChecksum()))) {
				logger.log(Level.INFO, "  + Known chunk " + StringUtil.toHex(chunk.getChecksum()));
				return false;
			}
			else {
				logger.log(Level.INFO, "  + New chunk " + StringUtil.toHex(chunk.getChecksum()));
				return true;
			}
		}

		@Override
		public File getMultiChunkFile(MultiChunkId multiChunkId) {
			// Use the File(parent, child) constructor instead of string
			// concatenation with a hard-coded '/' separator
			File outputMultiChunk = new File(tempDir, "multichunk-" + multiChunkId);
			chunkIndex.outputMultiChunkFiles.add(outputMultiChunk);

			return outputMultiChunk;
		}

		@Override
		public MultiChunkId createNewMultiChunkId(Chunk firstChunk) {
			// Note: In the real implementation, this should be random
			return new MultiChunkId(firstChunk.getChecksum());
		}

		@Override
		public boolean onFileFilter(File file) {
			return true;
		}

		@Override
		public boolean onFileStart(File file) {
			// Deduplicate regular files only (no directories, no symlinks)
			return file.isFile() && !FileUtil.isSymlink(file);
		}

		@Override
		public void onFileEnd(File file, byte[] checksum) {
			// Empty
		}

		@Override
		public void onMultiChunkOpen(MultiChunk multiChunk) {
			// Empty
		}

		@Override
		public void onMultiChunkClose(MultiChunk multiChunk) {
			// Empty
		}

		@Override
		public void onStart(int fileCount) {
			// Empty
		}

		@Override
		public void onFinish() {
			// Empty
		}
	});

	return chunkIndex;
}
Usage example of org.syncany.chunk.Chunk in the syncany project:
class MultiChunkerTest, method chunkFileIntoMultiChunks.
/**
 * Chunks all given files with the given chunker and packs the resulting
 * chunks into multichunks, starting a fresh multichunk whenever the current
 * one reports it is full. The trailing multichunk (possibly only partially
 * filled) is closed and included in the result as well.
 */
private Set<MultiChunk> chunkFileIntoMultiChunks(File tempDir, List<File> files, Chunker foc, MultiChunker customMultiChunker, Transformer transformer) throws IOException {
	Set<MultiChunk> multiChunks = new HashSet<MultiChunk>();
	MultiChunk currentMultiChunk = createNewMultiChunk(tempDir, customMultiChunker, transformer);

	for (File inputFile : files) {
		for (Enumeration<Chunk> chunkEnumeration = foc.createChunks(inputFile); chunkEnumeration.hasMoreElements(); ) {
			currentMultiChunk.write(chunkEnumeration.nextElement());

			// Full multichunk: finalize it and start a fresh one
			if (currentMultiChunk.isFull()) {
				currentMultiChunk.close();
				multiChunks.add(currentMultiChunk);

				currentMultiChunk = createNewMultiChunk(tempDir, customMultiChunker, transformer);
			}
		}
	}

	// Finalize the trailing multichunk and include it in the result
	currentMultiChunk.close();
	multiChunks.add(currentMultiChunk);

	return multiChunks;
}
Usage example of org.syncany.chunk.Chunk in the syncany project:
class TTTDChunkerTest, method testNextChunkEvenIfThereAreNone.
@Test
public void testNextChunkEvenIfThereAreNone() throws IOException {
// Test Constants: the file (5 KB) is much smaller than the chunk size
// (512 KB), so the chunker runs dry almost immediately
final int TOTAL_FILE_SIZE = 5 * 1024;
final int CHUNK_SIZE = 512 * 1024;
// Setup
File inputFile = TestFileUtil.createRandomFileInDirectory(tempDir, TOTAL_FILE_SIZE);
Chunker chunker = new TttdChunker(CHUNK_SIZE);
// Create chunks: drain the enumeration completely
Enumeration<Chunk> chunkEnumeration = chunker.createChunks(inputFile);
while (chunkEnumeration.hasMoreElements()) {
chunkEnumeration.nextElement();
}
// Past-the-end behavior: per the assertions, nextElement() is expected to
// return null rather than throw, and hasMoreElements() must keep
// reporting false. (NOTE(review): the original comment claimed this
// "should lead to an IOException" — the asserted contract is null; any
// IOException is presumably handled inside the chunker. Confirm.)
assertNull("No chunk expected, but data received.", chunkEnumeration.nextElement());
assertFalse("hasElements() should return 'false' if no chunk available.", chunkEnumeration.hasMoreElements());
}
Aggregations