Search in sources :

Example 46 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

The class HFileLink, method getHFileFromBackReference.

/**
 * Resolve the full path of the HFile that a back reference points to.
 *
 * @param rootDir root hbase directory
 * @param linkRefPath Link Back Reference path
 * @return full path of the referenced hfile
 */
public static Path getHFileFromBackReference(final Path rootDir, final Path linkRefPath) {
    // The reference file name encodes the table and region that hold the link.
    final Pair<TableName, String> parsed = parseBackReferenceName(linkRefPath.getName());
    final TableName refTableName = parsed.getFirst();
    final String refRegionName = parsed.getSecond();
    // Ascend from the back-reference to recover family/region/table directories.
    final Path familyDir = linkRefPath.getParent().getParent();
    final Path refRegionDir = familyDir.getParent();
    final Path refTableDir = refRegionDir.getParent();
    final String referencedHFile = getBackReferenceFileName(linkRefPath.getParent());
    final String hfileLinkName = createHFileLinkName(
        FSUtils.getTableName(refTableDir), refRegionDir.getName(), referencedHFile);
    // Rebuild the link location under the referencing table/region/family.
    final Path tableDir = FSUtils.getTableDir(rootDir, refTableName);
    final Path regionDir = HRegion.getRegionDir(tableDir, refRegionName);
    return new Path(new Path(regionDir, familyDir.getName()), hfileLinkName);
}
Also used : Path(org.apache.hadoop.fs.Path) TableName(org.apache.hadoop.hbase.TableName)

Example 47 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

The class SnapshotManager, method prepareToTakeSnapshot.

/**
 * Check to make sure that we are OK to run the passed snapshot. Checks to make sure that we
 * aren't already running a snapshot or restore on the requested table.
 * @param snapshot description of the snapshot we want to start
 * @throws HBaseSnapshotException if the filesystem could not be prepared to start the snapshot
 */
private synchronized void prepareToTakeSnapshot(SnapshotDescription snapshot) throws HBaseSnapshotException {
    FileSystem fs = master.getMasterFileSystem().getFileSystem();
    Path workingDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(snapshot, rootDir);
    TableName snapshotTable = TableName.valueOf(snapshot.getTable());
    // make sure we aren't already running a snapshot
    if (isTakingSnapshot(snapshot)) {
        SnapshotSentinel handler = this.snapshotHandlers.get(snapshotTable);
        throw new SnapshotCreationException("Rejected taking "
            + ClientSnapshotDescriptionUtils.toString(snapshot)
            + " because we are already running another snapshot "
            + (handler != null
                ? ("on the same table " + ClientSnapshotDescriptionUtils.toString(handler.getSnapshot()))
                : "with the same name"),
            ProtobufUtil.createSnapshotDesc(snapshot));
    }
    // make sure we aren't running a restore on the same table
    if (isRestoringTable(snapshotTable)) {
        // FIX: the message previously read "we are already have a restore in progress on the
        // same snapshot" — ungrammatical, and the conflict detected here is on the table, not
        // the snapshot. Also attach the snapshot description for consistency with the other
        // rejections thrown from this method.
        throw new SnapshotCreationException("Rejected taking "
            + ClientSnapshotDescriptionUtils.toString(snapshot)
            + " because we already have a restore in progress on the same table.",
            ProtobufUtil.createSnapshotDesc(snapshot));
    }
    try {
        // delete the working directory, since we aren't running the snapshot. Likely leftovers
        // from a failed attempt.
        fs.delete(workingDir, true);
        // recreate the working directory for the snapshot
        if (!fs.mkdirs(workingDir)) {
            throw new SnapshotCreationException("Couldn't create working directory ("
                + workingDir + ") for snapshot", ProtobufUtil.createSnapshotDesc(snapshot));
        }
    } catch (HBaseSnapshotException e) {
        // already the right type; rethrow unchanged
        throw e;
    } catch (IOException e) {
        throw new SnapshotCreationException(
            "Exception while checking to see if snapshot could be started.", e,
            ProtobufUtil.createSnapshotDesc(snapshot));
    }
}
Also used : Path(org.apache.hadoop.fs.Path) TableName(org.apache.hadoop.hbase.TableName) FileSystem(org.apache.hadoop.fs.FileSystem) MasterFileSystem(org.apache.hadoop.hbase.master.MasterFileSystem) SnapshotSentinel(org.apache.hadoop.hbase.master.SnapshotSentinel) SnapshotCreationException(org.apache.hadoop.hbase.snapshot.SnapshotCreationException) HBaseSnapshotException(org.apache.hadoop.hbase.snapshot.HBaseSnapshotException) IOException(java.io.IOException)

Example 48 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

The class SnapshotManager, method toSnapshotDescription.

/**
 * Convert a generic procedure description into a FLUSH-type snapshot description.
 *
 * @param desc procedure description carrying the snapshot name ("instance") and a
 *   "table" configuration property
 * @return the assembled snapshot description
 * @throws IOException if the snapshot name or table is missing from {@code desc}
 */
private SnapshotDescription toSnapshotDescription(ProcedureDescription desc) throws IOException {
    if (!desc.hasInstance()) {
        throw new IOException("Snapshot name is not defined: " + desc.toString());
    }
    // Scan the configuration pairs for the table name; if "table" appears more
    // than once, the last occurrence wins (matches the original loop semantics).
    String table = null;
    for (NameStringPair pair : desc.getConfigurationList()) {
        if ("table".equalsIgnoreCase(pair.getName())) {
            table = pair.getValue();
        }
    }
    if (table == null) {
        throw new IOException("Snapshot table is not defined: " + desc.toString());
    }
    return SnapshotDescription.newBuilder()
        .setTable(TableName.valueOf(table).getNameAsString())
        .setName(desc.getInstance())
        .setType(SnapshotDescription.Type.FLUSH)
        .build();
}
Also used : TableName(org.apache.hadoop.hbase.TableName) NameStringPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair) SnapshotDescription(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.SnapshotDescription) IOException(java.io.IOException)

Example 49 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

The class SnapshotManager, method restoreSnapshot.

/**
 * Restore the specified snapshot.
 * The restore will fail if the destination table has a snapshot or restore in progress.
 *
 * @param snapshot Snapshot Descriptor
 * @param hTableDescriptor Table Descriptor
 * @param nonceKey unique identifier to prevent duplicated RPC
 * @return procId the ID of the restore snapshot procedure
 */
private synchronized long restoreSnapshot(final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor, final NonceKey nonceKey) throws HBaseSnapshotException {
    final TableName table = hTableDescriptor.getTableName();
    // reject if a snapshot is currently being taken on the destination table
    if (isTakingSnapshot(table)) {
        throw new RestoreSnapshotException("Snapshot in progress on the restore table=" + table);
    }
    // reject if another restore is already running against the same table
    if (isRestoringTable(table)) {
        throw new RestoreSnapshotException("Restore already in progress on the table=" + table);
    }
    try {
        final RestoreSnapshotProcedure procedure = new RestoreSnapshotProcedure(
            master.getMasterProcedureExecutor().getEnvironment(), hTableDescriptor, snapshot);
        final long restoreProcId =
            master.getMasterProcedureExecutor().submitProcedure(procedure, nonceKey);
        // track the running restore so later conflict checks can find it
        this.restoreTableToProcIdMap.put(table, restoreProcId);
        return restoreProcId;
    } catch (Exception e) {
        String msg = "Couldn't restore the snapshot=" + ClientSnapshotDescriptionUtils.toString(snapshot) + " on table=" + table;
        LOG.error(msg, e);
        throw new RestoreSnapshotException(msg, e);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) RestoreSnapshotProcedure(org.apache.hadoop.hbase.master.procedure.RestoreSnapshotProcedure) SnapshotExistsException(org.apache.hadoop.hbase.snapshot.SnapshotExistsException) HBaseSnapshotException(org.apache.hadoop.hbase.snapshot.HBaseSnapshotException) RestoreSnapshotException(org.apache.hadoop.hbase.snapshot.RestoreSnapshotException) AccessDeniedException(org.apache.hadoop.hbase.security.AccessDeniedException) FileNotFoundException(java.io.FileNotFoundException) TablePartiallyOpenException(org.apache.hadoop.hbase.snapshot.TablePartiallyOpenException) SnapshotCreationException(org.apache.hadoop.hbase.snapshot.SnapshotCreationException) ForeignException(org.apache.hadoop.hbase.errorhandling.ForeignException) KeeperException(org.apache.zookeeper.KeeperException) IOException(java.io.IOException) UnknownSnapshotException(org.apache.hadoop.hbase.snapshot.UnknownSnapshotException) SnapshotDoesNotExistException(org.apache.hadoop.hbase.snapshot.SnapshotDoesNotExistException) RestoreSnapshotException(org.apache.hadoop.hbase.snapshot.RestoreSnapshotException)

Example 50 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

The class SnapshotManager, method removeSentinelIfFinished.

/**
 * Return the handler if it is currently live and has the same snapshot target name.
 * The handler is removed from the sentinels map if completed.
 * @param sentinels live handlers
 * @param snapshot snapshot description
 * @return null if doesn't match, else a live handler.
 */
private synchronized SnapshotSentinel removeSentinelIfFinished(final Map<TableName, SnapshotSentinel> sentinels, final SnapshotDescription snapshot) {
    if (!snapshot.hasTable()) {
        return null;
    }
    final TableName table = TableName.valueOf(snapshot.getTable());
    final SnapshotSentinel sentinel = sentinels.get(table);
    // No live handler for this table, or the handler targets a different snapshot name.
    if (sentinel == null || !sentinel.getSnapshot().getName().equals(snapshot.getName())) {
        return null;
    }
    // Drop it from the "in-progress" map once the handler has completed.
    if (sentinel.isFinished()) {
        sentinels.remove(table);
    }
    return sentinel;
}
Also used : TableName(org.apache.hadoop.hbase.TableName) SnapshotSentinel(org.apache.hadoop.hbase.master.SnapshotSentinel)

Aggregations

TableName (org.apache.hadoop.hbase.TableName)1033 Test (org.junit.Test)695 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)257 Table (org.apache.hadoop.hbase.client.Table)228 IOException (java.io.IOException)225 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)215 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)203 Result (org.apache.hadoop.hbase.client.Result)125 ArrayList (java.util.ArrayList)120 Put (org.apache.hadoop.hbase.client.Put)118 Path (org.apache.hadoop.fs.Path)113 Connection (org.apache.hadoop.hbase.client.Connection)103 Scan (org.apache.hadoop.hbase.client.Scan)98 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)89 ServerName (org.apache.hadoop.hbase.ServerName)85 Admin (org.apache.hadoop.hbase.client.Admin)85 Cell (org.apache.hadoop.hbase.Cell)77 HashMap (java.util.HashMap)75 Delete (org.apache.hadoop.hbase.client.Delete)66 InterruptedIOException (java.io.InterruptedIOException)63