Search in sources :

Example 16 with FileHistoryId

use of org.syncany.database.PartialFileHistory.FileHistoryId in project syncany by syncany.

This example shows the method collectPurgableFileVersions of the class CleanupOperation.

/**
 * Determines, for every time interval configured in the purge settings, which file
 * versions are eligible for purging -- namely all but the newest one per interval.
 *
 * @see CleanupOperation
 * @see CleanupOperationOptions#getPurgeFileVersionSettings()
 * @see FileVersionSqlDao#getFileHistoriesToPurgeInInterval(long, long, TimeUnit)
 */
private Map<FileHistoryId, List<FileVersion>> collectPurgableFileVersions() {
    Map<FileHistoryId, List<FileVersion>> purgeableVersions = new HashMap<FileHistoryId, List<FileVersion>>();

    long now = System.currentTimeMillis();
    long previousIntervalMultiplier = 0;

    // NOTE(review): the interval boundaries depend on the iteration order of the
    // settings map; presumably getPurgeFileVersionSettings() returns a sorted map -- confirm.
    for (Map.Entry<Long, TimeUnit> purgeSetting : options.getPurgeFileVersionSettings().entrySet()) {
        Long intervalMultiplier = purgeSetting.getKey();
        TimeUnit intervalTimeUnit = purgeSetting.getValue();

        // * 1000 converts the multiplier (apparently seconds) to milliseconds -- TODO confirm units
        long beginIntervalTimestamp = now - intervalMultiplier * 1000;
        long endIntervalTimestamp = now - previousIntervalMultiplier * 1000;

        Map<FileHistoryId, List<FileVersion>> intervalVersions =
                localDatabase.getFileHistoriesToPurgeInInterval(beginIntervalTimestamp, endIntervalTimestamp, intervalTimeUnit);

        putAllFileVersionsInMap(intervalVersions, purgeableVersions);
        previousIntervalMultiplier = intervalMultiplier;
    }

    return purgeableVersions;
}
Also used : FileHistoryId(org.syncany.database.PartialFileHistory.FileHistoryId) HashMap(java.util.HashMap) FileVersion(org.syncany.database.FileVersion) TimeUnit(org.syncany.operations.cleanup.CleanupOperationOptions.TimeUnit) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap) SortedMap(java.util.SortedMap)

Example 17 with FileHistoryId

use of org.syncany.database.PartialFileHistory.FileHistoryId in project syncany by syncany.

This example shows the method removeOldVersions of the class CleanupOperation.

/**
 * Checks whether there exist {@link FileVersion}s that must be deleted because the history
 * they belong to is too long (or because they fall into a configured purge interval).
 * Collects them, removes them from the local database, and adds the resulting unused
 * multichunks to the {@link RemoteTransaction} for remote deletion.
 */
private void removeOldVersions() throws Exception {
    Map<FileHistoryId, List<FileVersion>> versionsToPurge = new TreeMap<FileHistoryId, List<FileVersion>>();
    Map<FileHistoryId, FileVersion> versionsToPurgeBefore = new TreeMap<FileHistoryId, FileVersion>();

    // Time-based purging: file versions eligible according to the configured intervals
    if (options.isRemoveVersionsByInterval()) {
        versionsToPurge = collectPurgableFileVersions();
    }

    // History-length purging: non-final file versions and deleted (final) file versions
    // to fully delete. Note: versionsToPurge is modified inside this call!
    if (options.isRemoveOldVersions()) {
        versionsToPurgeBefore = collectPurgeBeforeFileVersions(versionsToPurge);
    }

    if (versionsToPurge.isEmpty() && versionsToPurgeBefore.isEmpty()) {
        logger.log(Level.INFO, "- Old version removal: Not necessary.");
        return;
    }

    logger.log(Level.INFO, "- Old version removal: Found {0} file histories and {1} file versions that need cleaning.", new Object[] { versionsToPurge.size(), versionsToPurgeBefore.size() });

    // Local: first remove the file versions that are no longer needed ...
    localDatabase.removeSmallerOrEqualFileVersions(versionsToPurgeBefore);
    localDatabase.removeFileVersions(versionsToPurge);

    // ... then determine what must be changed remotely, and remove it locally
    Map<MultiChunkId, MultiChunkEntry> unusedMultiChunks = localDatabase.getUnusedMultiChunks();
    localDatabase.removeUnreferencedDatabaseEntities();
    deleteUnusedRemoteMultiChunks(unusedMultiChunks);

    // Update result statistics with counts and total reclaimed multichunk size
    long unusedMultiChunkSize = 0;

    for (MultiChunkEntry unusedMultiChunk : unusedMultiChunks.values()) {
        unusedMultiChunkSize += unusedMultiChunk.getSize();
    }

    result.setRemovedOldVersionsCount(versionsToPurgeBefore.size() + versionsToPurge.size());
    result.setRemovedMultiChunksCount(unusedMultiChunks.size());
    result.setRemovedMultiChunksSize(unusedMultiChunkSize);
}
Also used : FileHistoryId(org.syncany.database.PartialFileHistory.FileHistoryId) MultiChunkId(org.syncany.database.MultiChunkEntry.MultiChunkId) FileVersion(org.syncany.database.FileVersion) ArrayList(java.util.ArrayList) List(java.util.List) TreeMap(java.util.TreeMap) MultiChunkEntry(org.syncany.database.MultiChunkEntry)

Example 18 with FileHistoryId

use of org.syncany.database.PartialFileHistory.FileHistoryId in project syncany by syncany.

This example shows the method startElement of the class DatabaseXmlParseHandler.

/**
 * SAX callback: dispatches on the current element path and incrementally builds the
 * in-memory database model ({@link DatabaseVersion} header fields, chunks, multichunks,
 * file contents, file histories and file versions).
 *
 * <p>Header elements are always parsed; content elements are only parsed when
 * {@code readType} is {@link DatabaseReadType#FULL}.
 *
 * @throws SAXException if a multichunk ID cannot be parsed, or if mandatory
 *         file version attributes are missing
 */
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
    // Track the current position in the XML tree as a slash-separated path
    elementPath += "/" + qName;
    if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion")) {
        databaseVersion = new DatabaseVersion();
    } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/time")) {
        Date timeValue = new Date(Long.parseLong(attributes.getValue("value")));
        databaseVersion.setTimestamp(timeValue);
    } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/client")) {
        String clientName = attributes.getValue("name");
        databaseVersion.setClient(clientName);
    } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/vectorClock")) {
        vectorClock = new VectorClock();
    } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/header/vectorClock/client")) {
        String clientName = attributes.getValue("name");
        Long clientValue = Long.parseLong(attributes.getValue("value"));
        vectorClock.setClock(clientName, clientValue);
    } else if (readType == DatabaseReadType.FULL) {
        // Content elements: only materialized for a FULL read
        if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/chunks/chunk")) {
            String chunkChecksumStr = attributes.getValue("checksum");
            ChunkChecksum chunkChecksum = ChunkChecksum.parseChunkChecksum(chunkChecksumStr);
            int chunkSize = Integer.parseInt(attributes.getValue("size"));
            ChunkEntry chunkEntry = new ChunkEntry(chunkChecksum, chunkSize);
            databaseVersion.addChunk(chunkEntry);
        } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileContents/fileContent")) {
            String checksumStr = attributes.getValue("checksum");
            long size = Long.parseLong(attributes.getValue("size"));
            fileContent = new FileContent();
            fileContent.setChecksum(FileChecksum.parseFileChecksum(checksumStr));
            fileContent.setSize(size);
        } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileContents/fileContent/chunkRefs/chunkRef")) {
            String chunkChecksumStr = attributes.getValue("ref");
            fileContent.addChunk(ChunkChecksum.parseChunkChecksum(chunkChecksumStr));
        } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/multiChunks/multiChunk")) {
            String multChunkIdStr = attributes.getValue("id");
            MultiChunkId multiChunkId = MultiChunkId.parseMultiChunkId(multChunkIdStr);
            long size = Long.parseLong(attributes.getValue("size"));
            if (multiChunkId == null) {
                throw new SAXException("Cannot read ID from multichunk " + multChunkIdStr);
            }
            multiChunk = new MultiChunkEntry(multiChunkId, size);
        } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/multiChunks/multiChunk/chunkRefs/chunkRef")) {
            String chunkChecksumStr = attributes.getValue("ref");
            multiChunk.addChunk(ChunkChecksum.parseChunkChecksum(chunkChecksumStr));
        } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileHistories/fileHistory")) {
            String fileHistoryIdStr = attributes.getValue("id");
            FileHistoryId fileId = FileHistoryId.parseFileId(fileHistoryIdStr);
            fileHistory = new PartialFileHistory(fileId);
        } else if (elementPath.equalsIgnoreCase("/database/databaseVersions/databaseVersion/fileHistories/fileHistory/fileVersions/fileVersion")) {
            String fileVersionStr = attributes.getValue("version");
            String path = attributes.getValue("path");
            String pathEncoded = attributes.getValue("pathEncoded");
            String sizeStr = attributes.getValue("size");
            String typeStr = attributes.getValue("type");
            String statusStr = attributes.getValue("status");
            String lastModifiedStr = attributes.getValue("lastModified");
            String updatedStr = attributes.getValue("updated");
            String checksumStr = attributes.getValue("checksum");
            String linkTarget = attributes.getValue("linkTarget");
            String dosAttributes = attributes.getValue("dosattrs");
            String posixPermissions = attributes.getValue("posixperms");
            if (fileVersionStr == null || (path == null && pathEncoded == null) || typeStr == null || statusStr == null || sizeStr == null || lastModifiedStr == null) {
                throw new SAXException("FileVersion: Attributes missing: version, path/pathEncoded, type, status, size and last modified are mandatory");
            }
            // NOTE(review): #58 mentions filtering versions purged "in the future", but no
            // filtering happens in this block -- confirm it moved elsewhere
            Long fileVersionNum = Long.parseLong(fileVersionStr);
            // Go add it!
            FileVersion fileVersion = new FileVersion();
            fileVersion.setVersion(fileVersionNum);
            if (path != null) {
                fileVersion.setPath(path);
            } else {
                // Fallback: path was Base64-encoded (e.g. special characters in the filename)
                try {
                    fileVersion.setPath(new String(Base64.decodeBase64(pathEncoded), "UTF-8"));
                } catch (UnsupportedEncodingException e) {
                    // Bug fix: preserve the original exception as the cause (it was previously dropped)
                    throw new RuntimeException("Invalid Base64 encoding for filename: " + pathEncoded, e);
                }
            }
            fileVersion.setType(FileType.valueOf(typeStr));
            fileVersion.setStatus(FileStatus.valueOf(statusStr));
            fileVersion.setSize(Long.parseLong(sizeStr));
            fileVersion.setLastModified(new Date(Long.parseLong(lastModifiedStr)));
            // Optional attributes are only set when present
            if (updatedStr != null) {
                fileVersion.setUpdated(new Date(Long.parseLong(updatedStr)));
            }
            if (checksumStr != null) {
                fileVersion.setChecksum(FileChecksum.parseFileChecksum(checksumStr));
            }
            if (linkTarget != null) {
                fileVersion.setLinkTarget(linkTarget);
            }
            if (dosAttributes != null) {
                fileVersion.setDosAttributes(dosAttributes);
            }
            if (posixPermissions != null) {
                fileVersion.setPosixPermissions(posixPermissions);
            }
            fileHistory.addFileVersion(fileVersion);
        }
    }
}
Also used : FileHistoryId(org.syncany.database.PartialFileHistory.FileHistoryId) MultiChunkId(org.syncany.database.MultiChunkEntry.MultiChunkId) VectorClock(org.syncany.database.VectorClock) UnsupportedEncodingException(java.io.UnsupportedEncodingException) ChunkChecksum(org.syncany.database.ChunkEntry.ChunkChecksum) Date(java.util.Date) PartialFileHistory(org.syncany.database.PartialFileHistory) SAXException(org.xml.sax.SAXException) FileContent(org.syncany.database.FileContent) MultiChunkEntry(org.syncany.database.MultiChunkEntry) ChunkEntry(org.syncany.database.ChunkEntry) FileVersion(org.syncany.database.FileVersion) MultiChunkEntry(org.syncany.database.MultiChunkEntry) DatabaseVersion(org.syncany.database.DatabaseVersion)

Example 19 with FileHistoryId

use of org.syncany.database.PartialFileHistory.FileHistoryId in project syncany by syncany.

This example shows the method expandFileHistoryId of the class FileHistorySqlDao.

/**
 * Resolves a file history ID prefix to the single full {@link FileHistoryId} it matches
 * in the database.
 *
 * @param fileHistoryIdPrefix prefix of the file history ID to expand
 * @return the unique matching full ID, or {@code null} if the prefix matches no history
 *         or more than one (ambiguous)
 * @throws RuntimeException wrapping the {@link SQLException} if the query fails
 */
public FileHistoryId expandFileHistoryId(FileHistoryId fileHistoryIdPrefix) {
    String likePattern = fileHistoryIdPrefix.toString() + "%";

    try (PreparedStatement preparedStatement = getStatement("filehistory.select.master.expandFileHistoryId.sql")) {
        preparedStatement.setString(1, likePattern);

        try (ResultSet resultSet = preparedStatement.executeQuery()) {
            if (!resultSet.next()) {
                // Prefix matches nothing
                return null;
            }

            FileHistoryId expandedFileHistoryId = FileHistoryId.parseFileId(resultSet.getString("filehistory_id"));

            // A second row means the prefix is ambiguous; signal that with null
            return resultSet.next() ? null : expandedFileHistoryId;
        }
    } catch (SQLException e) {
        throw new RuntimeException(e);
    }
}
Also used : FileHistoryId(org.syncany.database.PartialFileHistory.FileHistoryId) SQLException(java.sql.SQLException) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement)

Example 20 with FileHistoryId

use of org.syncany.database.PartialFileHistory.FileHistoryId in project syncany by syncany.

This example shows the method getAllVersionsInQuery of the class FileVersionSqlDao.

/**
 * Executes the given statement and groups the resulting file versions by their
 * file history ID.
 *
 * @param preparedStatement statement whose result set contains a {@code filehistory_id}
 *        column plus the columns expected by {@code createFileVersionFromRow()}
 * @return map of file history ID to the file versions found for it (possibly empty)
 * @throws SQLException if executing the query or reading the result set fails
 */
private Map<FileHistoryId, List<FileVersion>> getAllVersionsInQuery(PreparedStatement preparedStatement) throws SQLException {
    try (ResultSet resultSet = preparedStatement.executeQuery()) {
        Map<FileHistoryId, List<FileVersion>> versionsByHistory = new HashMap<FileHistoryId, List<FileVersion>>();

        while (resultSet.next()) {
            FileHistoryId historyId = FileHistoryId.parseFileId(resultSet.getString("filehistory_id"));
            FileVersion version = createFileVersionFromRow(resultSet);

            // Lazily create the per-history list on first encounter
            List<FileVersion> historyVersions = versionsByHistory.get(historyId);

            if (historyVersions == null) {
                historyVersions = new ArrayList<FileVersion>();
                versionsByHistory.put(historyId, historyVersions);
            }

            historyVersions.add(version);
        }

        return versionsByHistory;
    }
}
Also used : FileHistoryId(org.syncany.database.PartialFileHistory.FileHistoryId) HashMap(java.util.HashMap) FileVersion(org.syncany.database.FileVersion) ResultSet(java.sql.ResultSet) ArrayList(java.util.ArrayList) List(java.util.List)

Aggregations

FileHistoryId (org.syncany.database.PartialFileHistory.FileHistoryId)28 FileVersion (org.syncany.database.FileVersion)21 PartialFileHistory (org.syncany.database.PartialFileHistory)12 Test (org.junit.Test)10 DatabaseVersion (org.syncany.database.DatabaseVersion)8 File (java.io.File)6 ArrayList (java.util.ArrayList)6 MemoryDatabase (org.syncany.database.MemoryDatabase)6 PreparedStatement (java.sql.PreparedStatement)5 ResultSet (java.sql.ResultSet)5 HashMap (java.util.HashMap)5 MultiChunkId (org.syncany.database.MultiChunkEntry.MultiChunkId)4 SQLException (java.sql.SQLException)3 List (java.util.List)3 TreeMap (java.util.TreeMap)3 ChunkChecksum (org.syncany.database.ChunkEntry.ChunkChecksum)3 FileContent (org.syncany.database.FileContent)3 FileChecksum (org.syncany.database.FileContent.FileChecksum)3 MultiChunkEntry (org.syncany.database.MultiChunkEntry)3 RestoreOperationOptions (org.syncany.operations.restore.RestoreOperationOptions)3