Example usage of org.syncany.database.PartialFileHistory.FileHistoryId in the syncany project: class CleanupOperation, method collectPurgableFileVersions.
/**
 * For all time intervals defined in the purge file settings, determine the eligible file
 * versions to be purged -- namely all but the newest one.
 *
 * @see CleanupOperation
 * @see CleanupOperationOptions#getPurgeFileVersionSettings()
 * @see FileVersionSqlDao#getFileHistoriesToPurgeInInterval(long, long, TimeUnit)
 */
private Map<FileHistoryId, List<FileVersion>> collectPurgableFileVersions() {
	Map<FileHistoryId, List<FileVersion>> result = new HashMap<FileHistoryId, List<FileVersion>>();

	long now = System.currentTimeMillis();
	long previousMultiplier = 0;

	// Walk the configured interval settings; each iteration covers the window
	// (now - multiplier*1000, now - previousMultiplier*1000], i.e. intervals
	// are chained back-to-back from the previous setting's boundary.
	for (Map.Entry<Long, TimeUnit> setting : options.getPurgeFileVersionSettings().entrySet()) {
		long multiplier = setting.getKey();
		TimeUnit timeUnit = setting.getValue();

		long intervalBegin = now - multiplier * 1000;
		long intervalEnd = now - previousMultiplier * 1000;

		// Fetch the purgeable versions within this window from the local database ...
		Map<FileHistoryId, List<FileVersion>> intervalVersions =
				localDatabase.getFileHistoriesToPurgeInInterval(intervalBegin, intervalEnd, timeUnit);

		// ... and merge them into the overall result map
		putAllFileVersionsInMap(intervalVersions, result);

		previousMultiplier = multiplier;
	}

	return result;
}
Example usage of org.syncany.database.PartialFileHistory.FileHistoryId in the syncany project: class CleanupOperation, method removeOldVersions.
/**
 * This method checks if there exist {@link FileVersion}s which are to be deleted because the history they are a part
 * of is too long. It will collect these, remove them locally and add them to the {@link RemoteTransaction} for deletion.
 */
private void removeOldVersions() throws Exception {
	Map<FileHistoryId, List<FileVersion>> purgeFileVersions = new TreeMap<FileHistoryId, List<FileVersion>>();
	Map<FileHistoryId, FileVersion> purgeBeforeFileVersions = new TreeMap<FileHistoryId, FileVersion>();

	if (options.isRemoveVersionsByInterval()) {
		// Time-based purging, according to the configured interval settings
		purgeFileVersions = collectPurgableFileVersions();
	}

	if (options.isRemoveOldVersions()) {
		// All non-final file versions plus deleted (final) ones to remove entirely.
		// NOTE: purgeFileVersions is modified by this call!
		purgeBeforeFileVersions = collectPurgeBeforeFileVersions(purgeFileVersions);
	}

	boolean nothingToPurge = purgeFileVersions.isEmpty() && purgeBeforeFileVersions.isEmpty();

	if (nothingToPurge) {
		logger.log(Level.INFO, "- Old version removal: Not necessary.");
		return;
	}

	logger.log(Level.INFO, "- Old version removal: Found {0} file histories and {1} file versions that need cleaning.",
			new Object[] { purgeFileVersions.size(), purgeBeforeFileVersions.size() });

	// Local: First, remove file versions that are not longer needed
	localDatabase.removeSmallerOrEqualFileVersions(purgeBeforeFileVersions);
	localDatabase.removeFileVersions(purgeFileVersions);

	// Local: Then, determine what must be changed remotely and remove it locally
	Map<MultiChunkId, MultiChunkEntry> unusedMultiChunks = localDatabase.getUnusedMultiChunks();
	localDatabase.removeUnreferencedDatabaseEntities();
	deleteUnusedRemoteMultiChunks(unusedMultiChunks);

	// Update stats: total byte size of all multichunks that became unreferenced
	long unusedMultiChunkSize = 0;

	for (MultiChunkEntry unusedMultiChunk : unusedMultiChunks.values()) {
		unusedMultiChunkSize += unusedMultiChunk.getSize();
	}

	result.setRemovedOldVersionsCount(purgeBeforeFileVersions.size() + purgeFileVersions.size());
	result.setRemovedMultiChunksCount(unusedMultiChunks.size());
	result.setRemovedMultiChunksSize(unusedMultiChunkSize);
}
Example usage of org.syncany.database.PartialFileHistory.FileHistoryId in the syncany project: class DatabaseXmlParseHandler, method startElement.
@Override
// SAX callback: dispatches on the accumulated element path and populates the
// in-progress databaseVersion / fileContent / multiChunk / fileHistory fields.
// Header elements are always parsed; content elements only when readType == FULL.
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
// Track the current position in the XML tree as a slash-separated path
elementPath += "/" + qName;
if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion")) {
// Start of a new database version; subsequent elements fill it in
databaseVersion = new DatabaseVersion();
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/time")) {
// Header timestamp, stored as epoch millis in the 'value' attribute
Date timeValue = new Date(Long.parseLong(attributes.getValue("value")));
databaseVersion.setTimestamp(timeValue);
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/client")) {
String clientName = attributes.getValue("name");
databaseVersion.setClient(clientName);
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/vectorClock")) {
// Start of the vector clock; per-client entries follow as child elements
vectorClock = new VectorClock();
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/vectorClock/client")) {
// One vector clock component: (client name, logical clock value)
String clientName = attributes.getValue("name");
Long clientValue = Long.parseLong(attributes.getValue("value"));
vectorClock.setClock(clientName, clientValue);
} else if (readType == DatabaseReadType.FULL) {
// Body elements (chunks, file contents, multichunks, file histories) are
// only parsed for a FULL read; header-only reads skip everything below.
if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/chunks/chunk")) {
String chunkChecksumStr = attributes.getValue("checksum");
ChunkChecksum chunkChecksum = ChunkChecksum.parseChunkChecksum(chunkChecksumStr);
int chunkSize = Integer.parseInt(attributes.getValue("size"));
ChunkEntry chunkEntry = new ChunkEntry(chunkChecksum, chunkSize);
databaseVersion.addChunk(chunkEntry);
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileContents/fileContent")) {
// Start of a file content entry; chunk references follow as children
String checksumStr = attributes.getValue("checksum");
long size = Long.parseLong(attributes.getValue("size"));
fileContent = new FileContent();
fileContent.setChecksum(FileChecksum.parseFileChecksum(checksumStr));
fileContent.setSize(size);
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileContents/fileContent/chunkRefs/chunkRef")) {
// Chunk reference belonging to the fileContent currently being built
String chunkChecksumStr = attributes.getValue("ref");
fileContent.addChunk(ChunkChecksum.parseChunkChecksum(chunkChecksumStr));
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/multiChunks/multiChunk")) {
String multChunkIdStr = attributes.getValue("id");
MultiChunkId multiChunkId = MultiChunkId.parseMultiChunkId(multChunkIdStr);
long size = Long.parseLong(attributes.getValue("size"));
// A multichunk without a parseable id is unusable; abort the parse
if (multiChunkId == null) {
throw new SAXException("Cannot read ID from multichunk " + multChunkIdStr);
}
multiChunk = new MultiChunkEntry(multiChunkId, size);
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/multiChunks/multiChunk/chunkRefs/chunkRef")) {
// Chunk reference belonging to the multiChunk currently being built
String chunkChecksumStr = attributes.getValue("ref");
multiChunk.addChunk(ChunkChecksum.parseChunkChecksum(chunkChecksumStr));
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileHistories/fileHistory")) {
// Start of a file history; file versions follow as child elements
String fileHistoryIdStr = attributes.getValue("id");
FileHistoryId fileId = FileHistoryId.parseFileId(fileHistoryIdStr);
fileHistory = new PartialFileHistory(fileId);
} else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileHistories/fileHistory/fileVersions/fileVersion")) {
// One file version: read all attributes first, then validate and build
String fileVersionStr = attributes.getValue("version");
String path = attributes.getValue("path");
String pathEncoded = attributes.getValue("pathEncoded");
String sizeStr = attributes.getValue("size");
String typeStr = attributes.getValue("type");
String statusStr = attributes.getValue("status");
String lastModifiedStr = attributes.getValue("lastModified");
String updatedStr = attributes.getValue("updated");
String checksumStr = attributes.getValue("checksum");
String linkTarget = attributes.getValue("linkTarget");
String dosAttributes = attributes.getValue("dosattrs");
String posixPermissions = attributes.getValue("posixperms");
// Either 'path' (plain) or 'pathEncoded' (Base64) must be present
if (fileVersionStr == null || (path == null && pathEncoded == null) || typeStr == null || statusStr == null || sizeStr == null || lastModifiedStr == null) {
throw new SAXException("FileVersion: Attributes missing: version, path/pathEncoded, type, status, size and last modified are mandatory");
}
// Filter it if it was purged somewhere in the future, see #58
Long fileVersionNum = Long.parseLong(fileVersionStr);
// Go add it!
FileVersion fileVersion = new FileVersion();
fileVersion.setVersion(fileVersionNum);
if (path != null) {
fileVersion.setPath(path);
} else {
// Path was Base64-encoded (e.g. contains characters unsafe in XML attributes)
try {
fileVersion.setPath(new String(Base64.decodeBase64(pathEncoded), "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("Invalid Base64 encoding for filename: " + pathEncoded);
}
}
fileVersion.setType(FileType.valueOf(typeStr));
fileVersion.setStatus(FileStatus.valueOf(statusStr));
fileVersion.setSize(Long.parseLong(sizeStr));
fileVersion.setLastModified(new Date(Long.parseLong(lastModifiedStr)));
// The remaining attributes are optional; set them only when present
if (updatedStr != null) {
fileVersion.setUpdated(new Date(Long.parseLong(updatedStr)));
}
if (checksumStr != null) {
fileVersion.setChecksum(FileChecksum.parseFileChecksum(checksumStr));
}
if (linkTarget != null) {
fileVersion.setLinkTarget(linkTarget);
}
if (dosAttributes != null) {
fileVersion.setDosAttributes(dosAttributes);
}
if (posixPermissions != null) {
fileVersion.setPosixPermissions(posixPermissions);
}
fileHistory.addFileVersion(fileVersion);
}
}
}
Example usage of org.syncany.database.PartialFileHistory.FileHistoryId in the syncany project: class FileHistorySqlDao, method expandFileHistoryId.
/**
 * Expands a file history id prefix to the single full id that matches it via a
 * SQL LIKE query. Returns {@code null} when the prefix matches no history at
 * all, or when it is ambiguous (matches more than one history).
 */
public FileHistoryId expandFileHistoryId(FileHistoryId fileHistoryIdPrefix) {
	String likePattern = fileHistoryIdPrefix.toString() + "%";

	try (PreparedStatement preparedStatement = getStatement("filehistory.select.master.expandFileHistoryId.sql")) {
		preparedStatement.setString(1, likePattern);

		try (ResultSet resultSet = preparedStatement.executeQuery()) {
			if (!resultSet.next()) {
				// No history matches the given prefix
				return null;
			}

			FileHistoryId candidate = FileHistoryId.parseFileId(resultSet.getString("filehistory_id"));

			if (resultSet.next()) {
				// More than one match: the prefix is ambiguous
				return null;
			}

			return candidate;
		}
	}
	catch (SQLException e) {
		throw new RuntimeException(e);
	}
}
Example usage of org.syncany.database.PartialFileHistory.FileHistoryId in the syncany project: class FileVersionSqlDao, method getAllVersionsInQuery.
/**
 * Executes the given prepared statement and groups the resulting file versions
 * by their file history identifier.
 *
 * @param preparedStatement query whose result rows contain a 'filehistory_id' column plus the file version columns
 * @return map from file history id to the file versions found for it; empty map if the query returns no rows
 * @throws SQLException if executing the query or reading the result set fails
 */
private Map<FileHistoryId, List<FileVersion>> getAllVersionsInQuery(PreparedStatement preparedStatement) throws SQLException {
	try (ResultSet resultSet = preparedStatement.executeQuery()) {
		Map<FileHistoryId, List<FileVersion>> fileHistoryPurgeFileVersions = new HashMap<FileHistoryId, List<FileVersion>>();

		while (resultSet.next()) {
			FileHistoryId fileHistoryId = FileHistoryId.parseFileId(resultSet.getString("filehistory_id"));
			FileVersion fileVersion = createFileVersionFromRow(resultSet);

			// computeIfAbsent replaces the manual get / null-check / put grouping idiom
			fileHistoryPurgeFileVersions
					.computeIfAbsent(fileHistoryId, id -> new ArrayList<FileVersion>())
					.add(fileVersion);
		}

		return fileHistoryPurgeFileVersions;
	}
}
Aggregations