Use of org.syncany.database.MultiChunkEntry.MultiChunkId in project syncany by syncany.
The class DatabaseVersionSqlDao, method createDatabaseVersionFromRow.
protected DatabaseVersion createDatabaseVersionFromRow(ResultSet resultSet, boolean excludeChunkData, int fileHistoryMaxCount) throws SQLException {
    DatabaseVersionHeader databaseVersionHeader = createDatabaseVersionHeaderFromRow(resultSet);

    DatabaseVersion databaseVersion = new DatabaseVersion();
    databaseVersion.setHeader(databaseVersionHeader);

    // Add chunk/multichunk/filecontent data
    if (!excludeChunkData) {
        Map<ChunkChecksum, ChunkEntry> chunks = chunkDao.getChunks(databaseVersionHeader.getVectorClock());
        Map<MultiChunkId, MultiChunkEntry> multiChunks = multiChunkDao.getMultiChunks(databaseVersionHeader.getVectorClock());
        Map<FileChecksum, FileContent> fileContents = fileContentDao.getFileContents(databaseVersionHeader.getVectorClock());

        for (ChunkEntry chunk : chunks.values()) {
            databaseVersion.addChunk(chunk);
        }

        for (MultiChunkEntry multiChunk : multiChunks.values()) {
            databaseVersion.addMultiChunk(multiChunk);
        }

        for (FileContent fileContent : fileContents.values()) {
            databaseVersion.addFileContent(fileContent);
        }
    }

    // Add file histories
    Map<FileHistoryId, PartialFileHistory> fileHistories = fileHistoryDao.getFileHistoriesWithFileVersions(databaseVersionHeader.getVectorClock(), fileHistoryMaxCount);

    for (PartialFileHistory fileHistory : fileHistories.values()) {
        databaseVersion.addFileHistory(fileHistory);
    }

    return databaseVersion;
}
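A caller assembling a DatabaseVersion by hand follows the same accumulation pattern the DAO uses: create the version, then add entries keyed by MultiChunkId. The sketch below is illustrative only; it reuses MultiChunkId.secureRandomMultiChunkId() and the MultiChunkEntry(MultiChunkId, long) constructor that appear in other snippets on this page, and the size value is a placeholder.

    // Sketch only: mirrors the DAO's accumulation pattern with a hand-built entry.
    // The 512 KB size is a placeholder value.
    DatabaseVersion databaseVersion = new DatabaseVersion();

    MultiChunkId multiChunkId = MultiChunkId.secureRandomMultiChunkId();
    MultiChunkEntry multiChunkEntry = new MultiChunkEntry(multiChunkId, 512 * 1024);

    databaseVersion.addMultiChunk(multiChunkEntry);
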
Use of org.syncany.database.MultiChunkEntry.MultiChunkId in project syncany by syncany.
The class FrameworkCombinationTest, method deduplicateAndCreateChunkIndex.
private ChunkIndex deduplicateAndCreateChunkIndex(final List<File> inputFiles, FrameworkCombination combination) throws IOException {
    logger.log(Level.INFO, "- Deduplicate and create chunk index ...");

    final ChunkIndex chunkIndex = new ChunkIndex();
    Deduper deduper = new Deduper(combination.chunker, combination.multiChunker, combination.transformer, Long.MAX_VALUE, Long.MAX_VALUE);

    deduper.deduplicate(inputFiles, new DeduperListener() {
        @Override
        public void onMultiChunkWrite(MultiChunk multiChunk, Chunk chunk) {
            logger.log(Level.INFO, "  - Adding chunk " + StringUtil.toHex(chunk.getChecksum()) + " to multichunk " + multiChunk.getId() + " ...");
            chunkIndex.chunkIDToMultiChunkID.put(new ChunkChecksum(chunk.getChecksum()), multiChunk.getId());
        }

        @Override
        public void onFileAddChunk(File file, Chunk chunk) {
            logger.log(Level.INFO, "  - Adding chunk " + StringUtil.toHex(chunk.getChecksum()) + " to inputFileToChunkIDs-map for file " + file + " ...");
            List<ChunkChecksum> chunkIDsForFile = chunkIndex.inputFileToChunkIDs.get(file);

            if (chunkIDsForFile == null) {
                chunkIDsForFile = new ArrayList<ChunkChecksum>();
            }

            chunkIDsForFile.add(new ChunkChecksum(chunk.getChecksum()));
            chunkIndex.inputFileToChunkIDs.put(file, chunkIDsForFile);
        }

        @Override
        public boolean onChunk(Chunk chunk) {
            if (chunkIndex.chunkIDToMultiChunkID.containsKey(new ChunkChecksum(chunk.getChecksum()))) {
                logger.log(Level.INFO, "  + Known chunk " + StringUtil.toHex(chunk.getChecksum()));
                return false;
            }
            else {
                logger.log(Level.INFO, "  + New chunk " + StringUtil.toHex(chunk.getChecksum()));
                return true;
            }
        }

        @Override
        public File getMultiChunkFile(MultiChunkId multiChunkId) {
            File outputMultiChunk = new File(tempDir + "/multichunk-" + multiChunkId);
            chunkIndex.outputMultiChunkFiles.add(outputMultiChunk);

            return outputMultiChunk;
        }

        @Override
        public MultiChunkId createNewMultiChunkId(Chunk firstChunk) {
            // Note: In the real implementation, this should be random
            return new MultiChunkId(firstChunk.getChecksum());
        }

        @Override
        public boolean onFileFilter(File file) {
            return true;
        }

        @Override
        public boolean onFileStart(File file) {
            return file.isFile() && !FileUtil.isSymlink(file);
        }

        @Override
        public void onFileEnd(File file, byte[] checksum) {
            // Empty
        }

        @Override
        public void onMultiChunkOpen(MultiChunk multiChunk) {
            // Empty
        }

        @Override
        public void onMultiChunkClose(MultiChunk multiChunk) {
            // Empty
        }

        @Override
        public void onStart(int fileCount) {
            // Empty
        }

        @Override
        public void onFinish() {
            // Empty
        }
    });

    return chunkIndex;
}
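The dedup decision in onChunk above reduces to a plain map lookup: a chunk is only re-emitted if its checksum has not been indexed yet. A standalone sketch of that index follows; the checksum bytes are hypothetical placeholders, while the ChunkChecksum(byte[]) and MultiChunkId(byte[]) constructors are the ones used by the listener above.

    // Sketch of the chunk-to-multichunk index used by the listener above.
    // The checksum bytes are placeholders; real values come from the chunker.
    Map<ChunkChecksum, MultiChunkId> chunkIDToMultiChunkID = new HashMap<>();

    byte[] checksumBytes = new byte[] { 0x01, 0x02, 0x03, 0x04 };
    ChunkChecksum chunkChecksum = new ChunkChecksum(checksumBytes);

    boolean isNewChunk = !chunkIDToMultiChunkID.containsKey(chunkChecksum);

    if (isNewChunk) {
        chunkIDToMultiChunkID.put(chunkChecksum, new MultiChunkId(checksumBytes));
    }
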
Use of org.syncany.database.MultiChunkEntry.MultiChunkId in project syncany by syncany.
The class MultiChunkerTest, method createNewMultiChunk.
private MultiChunk createNewMultiChunk(File tempDir, MultiChunker customMultiChunker, Transformer transformer) {
    FileOutputStream fos;

    MultiChunkId multiChunkId = MultiChunkId.secureRandomMultiChunkId();
    MultiChunk customChunk = null;

    try {
        fos = new FileOutputStream(tempDir.getAbsolutePath() + "/MultiChunk" + multiChunkId);
        customChunk = customMultiChunker.createMultiChunk(multiChunkId, transformer.createOutputStream(fos));
    }
    catch (FileNotFoundException e) {
        e.printStackTrace();
    }
    catch (IOException e) {
        e.printStackTrace();
    }

    assertEquals(customChunk.getId(), multiChunkId);
    return customChunk;
}
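The test above embeds the id in a file name via string concatenation, and the XML handler further down parses ids back with MultiChunkId.parseMultiChunkId(String). Assuming MultiChunkId.toString() produces the same textual form that parseMultiChunkId accepts (an assumption, not confirmed on this page), a round-trip check might look like this:

    // Sketch, assuming MultiChunkId.toString() yields the textual form
    // that parseMultiChunkId(String) accepts (both methods appear on this page).
    MultiChunkId originalId = MultiChunkId.secureRandomMultiChunkId();
    MultiChunkId parsedId = MultiChunkId.parseMultiChunkId(originalId.toString());

    assertEquals(originalId, parsedId);
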
Use of org.syncany.database.MultiChunkEntry.MultiChunkId in project syncany by syncany.
The class CleanupOperation, method removeOldVersions.
/**
 * This method checks whether there are {@link FileVersion}s that must be deleted because the history they are
 * part of is too long. It collects these versions, removes them locally and adds them to the
 * {@link RemoteTransaction} for deletion.
 */
private void removeOldVersions() throws Exception {
    Map<FileHistoryId, List<FileVersion>> purgeFileVersions = new TreeMap<FileHistoryId, List<FileVersion>>();
    Map<FileHistoryId, FileVersion> purgeBeforeFileVersions = new TreeMap<FileHistoryId, FileVersion>();

    if (options.isRemoveVersionsByInterval()) {
        // Get file versions that should be purged according to the given settings. Time-based.
        purgeFileVersions = collectPurgableFileVersions();
    }

    if (options.isRemoveOldVersions()) {
        // Get all non-final file versions and deleted (final) file versions that we want to fully delete.
        // purgeFileVersions is modified here!
        purgeBeforeFileVersions = collectPurgeBeforeFileVersions(purgeFileVersions);
    }

    if (purgeFileVersions.isEmpty() && purgeBeforeFileVersions.isEmpty()) {
        logger.log(Level.INFO, "- Old version removal: Not necessary.");
        return;
    }

    logger.log(Level.INFO, "- Old version removal: Found {0} file histories and {1} file versions that need cleaning.",
            new Object[] { purgeFileVersions.size(), purgeBeforeFileVersions.size() });

    // Local: First, remove file versions that are no longer needed
    localDatabase.removeSmallerOrEqualFileVersions(purgeBeforeFileVersions);
    localDatabase.removeFileVersions(purgeFileVersions);

    // Local: Then, determine what must be changed remotely and remove it locally
    Map<MultiChunkId, MultiChunkEntry> unusedMultiChunks = localDatabase.getUnusedMultiChunks();
    localDatabase.removeUnreferencedDatabaseEntities();
    deleteUnusedRemoteMultiChunks(unusedMultiChunks);

    // Update stats
    long unusedMultiChunkSize = 0;

    for (MultiChunkEntry removedMultiChunk : unusedMultiChunks.values()) {
        unusedMultiChunkSize += removedMultiChunk.getSize();
    }

    result.setRemovedOldVersionsCount(purgeBeforeFileVersions.size() + purgeFileVersions.size());
    result.setRemovedMultiChunksCount(unusedMultiChunks.size());
    result.setRemovedMultiChunksSize(unusedMultiChunkSize);
}
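The size tally at the end of removeOldVersions can be written more compactly as a Java 8 stream; the behavior is identical, using the same MultiChunkEntry.getSize() accessor as the loop above:

    // Equivalent stream form of the size-accumulation loop above.
    long unusedMultiChunkSize = unusedMultiChunks.values().stream()
            .mapToLong(MultiChunkEntry::getSize)
            .sum();
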
Use of org.syncany.database.MultiChunkEntry.MultiChunkId in project syncany by syncany.
The class DatabaseXmlParseHandler, method startElement.
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
    elementPath += "/" + qName;

    if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion")) {
        databaseVersion = new DatabaseVersion();
    }
    else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/time")) {
        Date timeValue = new Date(Long.parseLong(attributes.getValue("value")));
        databaseVersion.setTimestamp(timeValue);
    }
    else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/client")) {
        String clientName = attributes.getValue("name");
        databaseVersion.setClient(clientName);
    }
    else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/vectorClock")) {
        vectorClock = new VectorClock();
    }
    else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/vectorClock/client")) {
        String clientName = attributes.getValue("name");
        Long clientValue = Long.parseLong(attributes.getValue("value"));
        vectorClock.setClock(clientName, clientValue);
    }
    else if (readType == DatabaseReadType.FULL) {
        if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/chunks/chunk")) {
            String chunkChecksumStr = attributes.getValue("checksum");
            ChunkChecksum chunkChecksum = ChunkChecksum.parseChunkChecksum(chunkChecksumStr);
            int chunkSize = Integer.parseInt(attributes.getValue("size"));

            ChunkEntry chunkEntry = new ChunkEntry(chunkChecksum, chunkSize);
            databaseVersion.addChunk(chunkEntry);
        }
        else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileContents/fileContent")) {
            String checksumStr = attributes.getValue("checksum");
            long size = Long.parseLong(attributes.getValue("size"));

            fileContent = new FileContent();
            fileContent.setChecksum(FileChecksum.parseFileChecksum(checksumStr));
            fileContent.setSize(size);
        }
        else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileContents/fileContent/chunkRefs/chunkRef")) {
            String chunkChecksumStr = attributes.getValue("ref");
            fileContent.addChunk(ChunkChecksum.parseChunkChecksum(chunkChecksumStr));
        }
        else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/multiChunks/multiChunk")) {
            String multiChunkIdStr = attributes.getValue("id");
            MultiChunkId multiChunkId = MultiChunkId.parseMultiChunkId(multiChunkIdStr);
            long size = Long.parseLong(attributes.getValue("size"));

            if (multiChunkId == null) {
                throw new SAXException("Cannot read ID from multichunk " + multiChunkIdStr);
            }

            multiChunk = new MultiChunkEntry(multiChunkId, size);
        }
        else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/multiChunks/multiChunk/chunkRefs/chunkRef")) {
            String chunkChecksumStr = attributes.getValue("ref");
            multiChunk.addChunk(ChunkChecksum.parseChunkChecksum(chunkChecksumStr));
        }
        else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileHistories/fileHistory")) {
            String fileHistoryIdStr = attributes.getValue("id");
            FileHistoryId fileId = FileHistoryId.parseFileId(fileHistoryIdStr);
            fileHistory = new PartialFileHistory(fileId);
        }
        else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileHistories/fileHistory/fileVersions/fileVersion")) {
            String fileVersionStr = attributes.getValue("version");
            String path = attributes.getValue("path");
            String pathEncoded = attributes.getValue("pathEncoded");
            String sizeStr = attributes.getValue("size");
            String typeStr = attributes.getValue("type");
            String statusStr = attributes.getValue("status");
            String lastModifiedStr = attributes.getValue("lastModified");
            String updatedStr = attributes.getValue("updated");
            String checksumStr = attributes.getValue("checksum");
            String linkTarget = attributes.getValue("linkTarget");
            String dosAttributes = attributes.getValue("dosattrs");
            String posixPermissions = attributes.getValue("posixperms");

            if (fileVersionStr == null || (path == null && pathEncoded == null) || typeStr == null || statusStr == null
                    || sizeStr == null || lastModifiedStr == null) {
                throw new SAXException("FileVersion: Attributes missing: version, path/pathEncoded, type, status, size and last modified are mandatory");
            }

            // Filter it if it was purged somewhere in the future, see #58
            Long fileVersionNum = Long.parseLong(fileVersionStr);

            // Go add it!
            FileVersion fileVersion = new FileVersion();
            fileVersion.setVersion(fileVersionNum);

            if (path != null) {
                fileVersion.setPath(path);
            }
            else {
                try {
                    fileVersion.setPath(new String(Base64.decodeBase64(pathEncoded), "UTF-8"));
                }
                catch (UnsupportedEncodingException e) {
                    throw new RuntimeException("Invalid Base64 encoding for filename: " + pathEncoded);
                }
            }

            fileVersion.setType(FileType.valueOf(typeStr));
            fileVersion.setStatus(FileStatus.valueOf(statusStr));
            fileVersion.setSize(Long.parseLong(sizeStr));
            fileVersion.setLastModified(new Date(Long.parseLong(lastModifiedStr)));

            if (updatedStr != null) {
                fileVersion.setUpdated(new Date(Long.parseLong(updatedStr)));
            }

            if (checksumStr != null) {
                fileVersion.setChecksum(FileChecksum.parseFileChecksum(checksumStr));
            }

            if (linkTarget != null) {
                fileVersion.setLinkTarget(linkTarget);
            }

            if (dosAttributes != null) {
                fileVersion.setDosAttributes(dosAttributes);
            }

            if (posixPermissions != null) {
                fileVersion.setPosixPermissions(posixPermissions);
            }

            fileHistory.addFileVersion(fileVersion);
        }
    }
}
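Extracted from the handler above, the multichunk branch alone shows the parse-then-validate pattern for MultiChunkId: parseMultiChunkId returns null on bad input rather than throwing, so the caller must check before constructing the entry. The attribute values in this sketch are hypothetical stand-ins for attributes.getValue("id") and attributes.getValue("size"):

    // Sketch of the multichunk parsing branch, with hypothetical input values.
    String multiChunkIdStr = "beefbeefbeefbeef"; // would come from attributes.getValue("id")
    long size = 4096L;                           // would come from attributes.getValue("size")

    MultiChunkId multiChunkId = MultiChunkId.parseMultiChunkId(multiChunkIdStr);

    if (multiChunkId == null) {
        throw new SAXException("Cannot read ID from multichunk " + multiChunkIdStr);
    }

    MultiChunkEntry multiChunk = new MultiChunkEntry(multiChunkId, size);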