Example 1 with SnapshotDescription

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription in project hbase by apache.

From class FileArchiverNotifierImpl, method bucketFilesToSnapshot.

/**
 * For the given snapshot, find all files which this {@code snapshotName} references. After a file
 * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and
 * {@code snapshotSizeChanges} is updated in concert.
 *
 * @param snapshotName The snapshot to check
 * @param filesToUpdate A mapping of archived files to their size
 * @param snapshotSizeChanges A mapping of snapshots and their change in size
 */
void bucketFilesToSnapshot(String snapshotName, Map<String, Long> filesToUpdate, Map<String, Long> snapshotSizeChanges) throws IOException {
    // A quick check to avoid doing work if the caller unnecessarily invoked this method.
    if (filesToUpdate.isEmpty()) {
        return;
    }
    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, CommonFSUtils.getRootDir(conf));
    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);
    // For each region referenced by the snapshot
    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {
        // For each column family in this region
        for (FamilyFiles ff : rm.getFamilyFilesList()) {
            // And each store file in that family
            for (StoreFile sf : ff.getStoreFilesList()) {
                Long valueOrNull = filesToUpdate.remove(sf.getName());
                if (valueOrNull != null) {
                    // This store file was recently archived; attribute its size to this snapshot.
                    snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);
                }
                // Short-circuit: if all of the archived files have been accounted
                // for, we don't need to iterate over the rest of the snapshot.
                if (filesToUpdate.isEmpty()) {
                    return;
                }
            }
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SnapshotManifest(org.apache.hadoop.hbase.snapshot.SnapshotManifest) FamilyFiles(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles) StoreFile(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile) SnapshotRegionManifest(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest) SnapshotDescription(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription)
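A minimal caller-side sketch, not taken from the HBase source: the snapshot names and the archived-file names/sizes are placeholder assumptions, chosen only to show how successive calls drain the shared filesToUpdate map.

// Caller-side sketch; archivedHFile1/2 and the snapshot names are hypothetical.
Map<String, Long> filesToUpdate = new HashMap<>();
filesToUpdate.put("archivedHFile1", 1024L);  // archived store file -> size in bytes
filesToUpdate.put("archivedHFile2", 2048L);

Map<String, Long> snapshotSizeChanges = new HashMap<>();
for (String snapshotName : Arrays.asList("snapshot_1", "snapshot_2")) {
    // Each call removes the files this snapshot references from filesToUpdate,
    // so later snapshots only see files that are still unaccounted for.
    bucketFilesToSnapshot(snapshotName, filesToUpdate, snapshotSizeChanges);
}
// snapshotSizeChanges now maps each snapshot name to the total size of the
// archived files attributed to it.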

Example 2 with SnapshotDescription

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription in project hbase by apache.

From class RestoreTool, method getTableDesc.

/**
 * Get the table descriptor for a backed-up table.
 * @param tableName the name of the table that was backed up
 * @return {@link TableDescriptor} saved in backup image of the table
 */
TableDescriptor getTableDesc(TableName tableName) throws IOException {
    Path tableInfoPath = this.getTableInfoPath(tableName);
    SnapshotDescription desc = SnapshotDescriptionUtils.readSnapshotInfo(fs, tableInfoPath);
    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, tableInfoPath, desc);
    TableDescriptor tableDescriptor = manifest.getTableDescriptor();
    if (!tableDescriptor.getTableName().equals(tableName)) {
        LOG.error("couldn't find Table Desc for table: " + tableName + " under tableInfoPath: " + tableInfoPath.toString());
        LOG.error("tableDescriptor.getNameAsString() = " + tableDescriptor.getTableName().getNameAsString());
        throw new FileNotFoundException("couldn't find Table Desc for table: " + tableName + " under tableInfoPath: " + tableInfoPath.toString());
    }
    return tableDescriptor;
}
Also used : Path(org.apache.hadoop.fs.Path) SnapshotManifest(org.apache.hadoop.hbase.snapshot.SnapshotManifest) FileNotFoundException(java.io.FileNotFoundException) SnapshotDescription(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
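A hedged usage sketch; the restoreTool and admin instances and the table name are assumptions for illustration. The descriptor recovered from the backup image carries the table's schema and can be handed to Admin.createTable when recreating the table on the restore target.

// Illustrative only; restoreTool and admin are assumed to exist in scope.
TableName tableName = TableName.valueOf("my_table");
TableDescriptor backedUpDesc = restoreTool.getTableDesc(tableName);
// Recreate the table on the target cluster with the schema captured in the
// backup image.
if (!admin.tableExists(tableName)) {
    admin.createTable(backedUpDesc);
}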

Example 3 with SnapshotDescription

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription in project hbase by apache.

From class MasterRpcServices, method getCompletedSnapshots.

/**
 * List the currently available/stored snapshots. Any in-progress snapshots are ignored.
 */
@Override
public GetCompletedSnapshotsResponse getCompletedSnapshots(RpcController controller, GetCompletedSnapshotsRequest request) throws ServiceException {
    try {
        server.checkInitialized();
        GetCompletedSnapshotsResponse.Builder builder = GetCompletedSnapshotsResponse.newBuilder();
        List<SnapshotDescription> snapshots = server.snapshotManager.getCompletedSnapshots();
        // convert to protobuf
        for (SnapshotDescription snapshot : snapshots) {
            builder.addSnapshots(snapshot);
        }
        return builder.build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used : GetCompletedSnapshotsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) SnapshotDescription(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException)
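Client code does not usually call this RPC directly; Admin.listSnapshots() is the typical entry point to the same completed-snapshot list. A minimal client-side sketch, assuming a running, reachable cluster:

// Minimal client-side sketch; assumes a reachable HBase cluster.
try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
     Admin admin = connection.getAdmin()) {
    // Note: Admin surfaces the client-facing SnapshotDescription
    // (org.apache.hadoop.hbase.client.SnapshotDescription), not the shaded protobuf type.
    for (org.apache.hadoop.hbase.client.SnapshotDescription snapshot : admin.listSnapshots()) {
        System.out.println(snapshot.getName() + " (table " + snapshot.getTableName() + ")");
    }
}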

Example 4 with SnapshotDescription

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription in project hbase by apache.

From class ExportSnapshot, method verifySnapshot.

private void verifySnapshot(final Configuration baseConf, final FileSystem fs, final Path rootDir, final Path snapshotDir) throws IOException {
    // Update the conf with the current root dir, since it may be a different cluster
    Configuration conf = new Configuration(baseConf);
    CommonFSUtils.setRootDir(conf, rootDir);
    CommonFSUtils.setFsDefault(conf, CommonFSUtils.getRootDir(conf));
    SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
    SnapshotReferenceUtil.verifySnapshot(conf, fs, snapshotDir, snapshotDesc);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) SnapshotDescription(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription)
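The same verification can be driven standalone, built only from calls that already appear in these examples; a sketch, assuming the default configuration and a placeholder snapshot name "my_snapshot":

// Standalone verification sketch; "my_snapshot" is a placeholder name.
Configuration conf = HBaseConfiguration.create();
Path rootDir = CommonFSUtils.getRootDir(conf);
FileSystem fs = rootDir.getFileSystem(conf);
Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir("my_snapshot", rootDir);
SnapshotDescription desc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
// verifySnapshot walks every file the snapshot references and throws if any
// is missing or invalid.
SnapshotReferenceUtil.verifySnapshot(conf, fs, snapshotDir, desc);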

Example 5 with SnapshotDescription

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription in project hbase by apache.

From class ExportSnapshot, method getSnapshotFiles.

// ==========================================================================
// Input Format
// ==========================================================================
/**
 * Extract the list of files (HFiles/WALs) to copy using Map-Reduce.
 * @return list of files referenced by the snapshot (pair of path and size)
 */
private static List<Pair<SnapshotFileInfo, Long>> getSnapshotFiles(final Configuration conf, final FileSystem fs, final Path snapshotDir) throws IOException {
    SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
    final List<Pair<SnapshotFileInfo, Long>> files = new ArrayList<>();
    final TableName table = TableName.valueOf(snapshotDesc.getTable());
    // Get snapshot files
    LOG.info("Loading Snapshot '" + snapshotDesc.getName() + "' hfile list");
    SnapshotReferenceUtil.visitReferencedFiles(conf, fs, snapshotDir, snapshotDesc, new SnapshotReferenceUtil.SnapshotVisitor() {

        @Override
        public void storeFile(final RegionInfo regionInfo, final String family, final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
            Pair<SnapshotFileInfo, Long> snapshotFileAndSize = null;
            if (!storeFile.hasReference()) {
                String region = regionInfo.getEncodedName();
                String hfile = storeFile.getName();
                snapshotFileAndSize = getSnapshotFileAndSize(fs, conf, table, region, family, hfile, storeFile.hasFileSize() ? storeFile.getFileSize() : -1);
            } else {
                Pair<String, String> referredToRegionAndFile = StoreFileInfo.getReferredToRegionAndFile(storeFile.getName());
                String referencedRegion = referredToRegionAndFile.getFirst();
                String referencedHFile = referredToRegionAndFile.getSecond();
                snapshotFileAndSize = getSnapshotFileAndSize(fs, conf, table, referencedRegion, family, referencedHFile, storeFile.hasFileSize() ? storeFile.getFileSize() : -1);
            }
            files.add(snapshotFileAndSize);
        }
    });
    return files;
}
Also used : ArrayList(java.util.ArrayList) SnapshotRegionManifest(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) SnapshotDescription(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription) IOException(java.io.IOException) TableName(org.apache.hadoop.hbase.TableName) Pair(org.apache.hadoop.hbase.util.Pair)
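Since getSnapshotFiles is private to ExportSnapshot, the sketch below is only illustrative of what the returned pairs carry: per-file sizes that a caller could total to estimate the copy workload before launching the Map-Reduce job.

// Illustrative only: getSnapshotFiles is private, so a real caller would live
// inside ExportSnapshot. conf, fs and snapshotDir are assumed to be in scope.
long totalBytes = 0;
for (Pair<SnapshotFileInfo, Long> fileAndSize : getSnapshotFiles(conf, fs, snapshotDir)) {
    totalBytes += fileAndSize.getSecond();
}
LOG.info("Snapshot references " + totalBytes + " bytes of HFiles/WALs");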

Aggregations

SnapshotDescription (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription): 26 usages
Path (org.apache.hadoop.fs.Path): 16 usages
IOException (java.io.IOException): 12 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 6 usages
SnapshotRegionManifest (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest): 6 usages
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 5 usages
SnapshotManifest (org.apache.hadoop.hbase.snapshot.SnapshotManifest): 5 usages
Test (org.junit.Test): 5 usages
FileNotFoundException (java.io.FileNotFoundException): 4 usages
ArrayList (java.util.ArrayList): 4 usages
TableName (org.apache.hadoop.hbase.TableName): 4 usages
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 4 usages
HashSet (java.util.HashSet): 3 usages
Configuration (org.apache.hadoop.conf.Configuration): 3 usages
FileStatus (org.apache.hadoop.fs.FileStatus): 3 usages
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 3 usages
MasterFileSystem (org.apache.hadoop.hbase.master.MasterFileSystem): 3 usages
InterruptedIOException (java.io.InterruptedIOException): 2 usages
ExecutionException (java.util.concurrent.ExecutionException): 2 usages
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 2 usages