
Example 1 with Repo

Use of org.apache.accumulo.fate.Repo in project accumulo by apache.

The call method of the class MoveExportedFiles:

@Override
public Repo<Manager> call(long tid, Manager manager) throws Exception {
    String fmtTid = FateTxId.formatTid(tid);
    int workerCount = manager.getConfiguration().getCount(Property.MANAGER_RENAME_THREADS);
    VolumeManager fs = manager.getVolumeManager();
    Map<Path, Path> oldToNewPaths = new HashMap<>();
    for (ImportedTableInfo.DirectoryMapping dm : tableInfo.directories) {
        Map<String, String> fileNameMappings = new HashMap<>();
        PopulateMetadataTable.readMappingFile(fs, tableInfo, dm.importDir, fileNameMappings);
        FileStatus[] exportedFiles = fs.listStatus(new Path(dm.exportDir));
        FileStatus[] importedFiles = fs.listStatus(new Path(dm.importDir));
        Function<FileStatus, String> fileStatusName = fstat -> fstat.getPath().getName();
        Set<String> importing = Arrays.stream(exportedFiles).map(fileStatusName).map(fileNameMappings::get).collect(Collectors.toSet());
        Set<String> imported = Arrays.stream(importedFiles).map(fileStatusName).collect(Collectors.toSet());
        if (log.isDebugEnabled()) {
            log.debug("{} files already present in imported (target) directory: {}", fmtTid, String.join(",", imported));
        }
        Set<String> missingFiles = Sets.difference(new HashSet<>(fileNameMappings.values()), new HashSet<>(Sets.union(importing, imported)));
        if (!missingFiles.isEmpty()) {
            throw new AcceptableThriftTableOperationException(tableInfo.tableId.canonical(), tableInfo.tableName, TableOperation.IMPORT, TableOperationExceptionType.OTHER, "Missing source files corresponding to files " + String.join(",", missingFiles));
        }
        for (FileStatus fileStatus : exportedFiles) {
            Path originalPath = fileStatus.getPath();
            String newName = fileNameMappings.get(originalPath.getName());
            // Need to exclude any other files which may be present in the exported directory
            if (newName != null) {
                Path newPath = new Path(dm.importDir, newName);
                // No try-catch here, as we do not expect any "benign" exceptions. Prior code already
                // accounts for files which were already moved. So anything returned by the rename
                // operation would be truly unexpected
                oldToNewPaths.put(originalPath, newPath);
            } else {
                log.debug("{} not moving (unmapped) file {}", fmtTid, originalPath);
            }
        }
    }
    try {
        fs.bulkRename(oldToNewPaths, workerCount, "importtable rename", fmtTid);
    } catch (IOException ioe) {
        throw new AcceptableThriftTableOperationException(tableInfo.tableId.canonical(), null, TableOperation.IMPORT, TableOperationExceptionType.OTHER, ioe.getCause().getMessage());
    }
    return new FinishImportTable(tableInfo);
}
Also used : Path(org.apache.hadoop.fs.Path) AcceptableThriftTableOperationException(org.apache.accumulo.core.clientImpl.AcceptableThriftTableOperationException) Arrays(java.util.Arrays) VolumeManager(org.apache.accumulo.server.fs.VolumeManager) Logger(org.slf4j.Logger) LoggerFactory(org.slf4j.LoggerFactory) Set(java.util.Set) IOException(java.io.IOException) HashMap(java.util.HashMap) FileStatus(org.apache.hadoop.fs.FileStatus) Function(java.util.function.Function) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) Repo(org.apache.accumulo.fate.Repo) HashSet(java.util.HashSet) TableOperation(org.apache.accumulo.core.clientImpl.thrift.TableOperation) Manager(org.apache.accumulo.manager.Manager) Map(java.util.Map) TableOperationExceptionType(org.apache.accumulo.core.clientImpl.thrift.TableOperationExceptionType) FateTxId(org.apache.accumulo.fate.FateTxId) Property(org.apache.accumulo.core.conf.Property) ManagerRepo(org.apache.accumulo.manager.tableOps.ManagerRepo)
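
Both examples follow the same FATE pattern: each step is a Repo<Manager> whose call method performs one unit of work and returns the Repo for the next step (here, MoveExportedFiles returns a new FinishImportTable). The sketch below is a self-contained illustration of that chaining contract only; the Step interface, the two step classes, and the driver loop are hypothetical stand-ins, not the actual org.apache.accumulo.fate API.

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for Repo<T>: a step returns the next step,
// or null when the transaction is complete.
interface Step<E> {
    Step<E> call(long tid, E env) throws Exception;
}

public class FateChainSketch {

    // Hypothetical environment object standing in for Manager.
    static class Env {
        final List<String> log = new ArrayList<>();
    }

    // Stand-in for MoveExportedFiles: do the work, then hand off to the next step.
    static class MoveFilesStep implements Step<Env> {
        @Override
        public Step<Env> call(long tid, Env env) {
            env.log.add(String.format("%016x moved files", tid));
            return new FinishStep();
        }
    }

    // Stand-in for FinishImportTable: final step, returns null to end the chain.
    static class FinishStep implements Step<Env> {
        @Override
        public Step<Env> call(long tid, Env env) {
            env.log.add(String.format("%016x finished import", tid));
            return null;
        }
    }

    public static void main(String[] args) throws Exception {
        Env env = new Env();
        long tid = 42L;
        // Minimal driver loop: keep calling steps until one returns null.
        Step<Env> step = new MoveFilesStep();
        while (step != null) {
            step = step.call(tid, env);
        }
        env.log.forEach(System.out::println);
    }
}

The real FATE executor also persists each step so a chain can resume after a manager restart, which is why the individual steps are written to be idempotent.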

Example 2 with Repo

Use of org.apache.accumulo.fate.Repo in project accumulo by apache.

The call method of the class CompactRange:

@Override
public Repo<Manager> call(final long tid, Manager env) throws Exception {
    String zTablePath = Constants.ZROOT + "/" + env.getInstanceID() + Constants.ZTABLES + "/" + tableId + Constants.ZTABLE_COMPACT_ID;
    ZooReaderWriter zoo = env.getContext().getZooReaderWriter();
    byte[] cid;
    try {
        cid = zoo.mutateExisting(zTablePath, currentValue -> {
            String cvs = new String(currentValue, UTF_8);
            String[] tokens = cvs.split(",");
            long flushID = Long.parseLong(tokens[0]) + 1;
            String txidString = String.format("%016x", tid);
            for (int i = 1; i < tokens.length; i++) {
                if (tokens[i].startsWith(txidString))
                    // skip self
                    continue;
                log.debug("txidString : {}", txidString);
                log.debug("tokens[{}] : {}", i, tokens[i]);
                throw new AcceptableThriftTableOperationException(tableId.canonical(), null, TableOperation.COMPACT, TableOperationExceptionType.OTHER, "Another compaction with iterators and/or a compaction strategy is running");
            }
            StringBuilder encodedIterators = new StringBuilder();
            if (config != null) {
                Hex hex = new Hex();
                encodedIterators.append(",");
                encodedIterators.append(txidString);
                encodedIterators.append("=");
                encodedIterators.append(new String(hex.encode(config), UTF_8));
            }
            return (Long.toString(flushID) + encodedIterators).getBytes(UTF_8);
        });
        return new CompactionDriver(Long.parseLong(new String(cid, UTF_8).split(",")[0]), namespaceId, tableId, startRow, endRow);
    } catch (NoNodeException nne) {
        throw new AcceptableThriftTableOperationException(tableId.canonical(), null, TableOperation.COMPACT, TableOperationExceptionType.NOTFOUND, null);
    }
}
Also used : UserCompactionUtils.isDefault(org.apache.accumulo.core.clientImpl.UserCompactionUtils.isDefault) TableId(org.apache.accumulo.core.data.TableId) AcceptableThriftTableOperationException(org.apache.accumulo.core.clientImpl.AcceptableThriftTableOperationException) Logger(org.slf4j.Logger) UTF_8(java.nio.charset.StandardCharsets.UTF_8) LoggerFactory(org.slf4j.LoggerFactory) CompactionConfig(org.apache.accumulo.core.client.admin.CompactionConfig) Hex(org.apache.commons.codec.binary.Hex) Constants(org.apache.accumulo.core.Constants) TextUtil(org.apache.accumulo.core.util.TextUtil) Repo(org.apache.accumulo.fate.Repo) TableOperation(org.apache.accumulo.core.clientImpl.thrift.TableOperation) Manager(org.apache.accumulo.manager.Manager) NoNodeException(org.apache.zookeeper.KeeperException.NoNodeException) Objects.requireNonNull(java.util.Objects.requireNonNull) Optional(java.util.Optional) Utils(org.apache.accumulo.manager.tableOps.Utils) CompactionStrategyConfigUtil(org.apache.accumulo.core.clientImpl.CompactionStrategyConfigUtil) TableOperationExceptionType(org.apache.accumulo.core.clientImpl.thrift.TableOperationExceptionType) NamespaceId(org.apache.accumulo.core.data.NamespaceId) ZooReaderWriter(org.apache.accumulo.fate.zookeeper.ZooReaderWriter) UserCompactionUtils(org.apache.accumulo.core.clientImpl.UserCompactionUtils) ManagerRepo(org.apache.accumulo.manager.tableOps.ManagerRepo)
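
The ZooKeeper value mutated above is a comma-separated string: the first token is the table's compaction id, and each later token has the form <txid>=<hex-encoded config> for an in-flight user compaction. The helper below re-implements that mutation as a pure function so the parsing and encoding are easy to follow in isolation; the class and method names and the tiny hex encoder are illustrative stand-ins (the real code uses org.apache.commons.codec.binary.Hex and throws AcceptableThriftTableOperationException).

import static java.nio.charset.StandardCharsets.UTF_8;

public class CompactIdSketch {

    // Illustrative re-implementation of the mutateExisting lambda in CompactRange.call:
    // bump the leading compaction id and, if a per-transaction config is supplied,
    // append "<txid>=<hex(config)>". Fails if another transaction already registered one.
    static String nextCompactValue(String currentValue, long tid, byte[] config) {
        String[] tokens = currentValue.split(",");
        long compactId = Long.parseLong(tokens[0]) + 1;
        String txidString = String.format("%016x", tid);
        for (int i = 1; i < tokens.length; i++) {
            // Tokens written by this transaction are skipped; anything else means a
            // conflicting compaction is already running.
            if (!tokens[i].startsWith(txidString)) {
                throw new IllegalStateException(
                    "Another compaction with iterators and/or a compaction strategy is running");
            }
        }
        StringBuilder sb = new StringBuilder(Long.toString(compactId));
        if (config != null) {
            sb.append(",").append(txidString).append("=").append(toHex(config));
        }
        return sb.toString();
    }

    // Minimal hex encoder standing in for org.apache.commons.codec.binary.Hex.
    static String toHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        byte[] config = "iterators".getBytes(UTF_8);
        // Starting from compaction id 7 with no other compactions registered:
        System.out.println(nextCompactValue("7", 0x1abcL, config));
        // Prints: 8,0000000000001abc=6974657261746f7273
    }
}

CompactRange.call then parses the first token of the value returned by mutateExisting and passes that compaction id to the next step, CompactionDriver.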

Aggregations

AcceptableThriftTableOperationException (org.apache.accumulo.core.clientImpl.AcceptableThriftTableOperationException) 2
TableOperation (org.apache.accumulo.core.clientImpl.thrift.TableOperation) 2
TableOperationExceptionType (org.apache.accumulo.core.clientImpl.thrift.TableOperationExceptionType) 2
Repo (org.apache.accumulo.fate.Repo) 2
Manager (org.apache.accumulo.manager.Manager) 2
ManagerRepo (org.apache.accumulo.manager.tableOps.ManagerRepo) 2
Logger (org.slf4j.Logger) 2
LoggerFactory (org.slf4j.LoggerFactory) 2
Sets (com.google.common.collect.Sets) 1
IOException (java.io.IOException) 1
UTF_8 (java.nio.charset.StandardCharsets.UTF_8) 1
Arrays (java.util.Arrays) 1
HashMap (java.util.HashMap) 1
HashSet (java.util.HashSet) 1
Map (java.util.Map) 1
Objects.requireNonNull (java.util.Objects.requireNonNull) 1
Optional (java.util.Optional) 1
Set (java.util.Set) 1
Function (java.util.function.Function) 1
Collectors (java.util.stream.Collectors) 1