Search in sources :

Example 11 with Delete

Use of org.apache.hadoop.hbase.client.Delete in the Apache HBase project.

The class MetaTableAccessor, method deleteRegion.

/**
 * Removes the row for the given region from the hbase:meta table.
 *
 * @param connection connection to use for the meta mutation
 * @param regionInfo region whose meta row should be removed
 * @throws IOException if the delete against meta fails
 */
public static void deleteRegion(Connection connection, HRegionInfo regionInfo) throws IOException {
    // Stamp the tombstone with the current time so it covers every cell of the
    // catalog family written up to this point.
    final long now = EnvironmentEdgeManager.currentTime();
    final Delete metaDelete = new Delete(regionInfo.getRegionName());
    metaDelete.addFamily(getCatalogFamily(), now);
    deleteFromMetaTable(connection, metaDelete);
    LOG.info("Deleted " + regionInfo.getRegionNameAsString());
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete)

Example 12 with Delete

Use of org.apache.hadoop.hbase.client.Delete in the Apache HBase project.

The class HelloHBase, method deleteRow.

/**
 * Invokes Table#delete to remove the test row (MY_ROW_ID).
 *
 * @param table Standard Table object
 * @throws IOException If IO problem is encountered
 */
static void deleteRow(final Table table) throws IOException {
    final String rowAsString = Bytes.toString(MY_ROW_ID);
    final String tableAsString = table.getName().getNameAsString();
    System.out.println("Deleting row [" + rowAsString + "] from Table [" + tableAsString + "].");
    table.delete(new Delete(MY_ROW_ID));
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete)

Example 13 with Delete

Use of org.apache.hadoop.hbase.client.Delete in the Apache HBase project.

The class RSGroupInfoManagerImpl, method flushConfigTable.

/**
 * Pushes the current rsgroup map to the rsgroup meta table in one atomic
 * batch: deletes rows for groups that disappeared, puts rows for all current
 * groups, and returns the resulting table-to-group ownership map.
 *
 * @param groupMap current group name to RSGroupInfo mapping
 * @return mapping of every table to the name of the group that owns it
 * @throws IOException if the batched mutation against the rsgroup table fails
 */
private synchronized Map<TableName, String> flushConfigTable(Map<String, RSGroupInfo> groupMap) throws IOException {
    Map<TableName, String> tableToGroup = Maps.newHashMap();
    List<Mutation> pending = Lists.newArrayList();
    // Queue a delete for every previously known group that no longer exists.
    for (String existing : prevRSGroups) {
        if (groupMap.containsKey(existing)) {
            continue;
        }
        pending.add(new Delete(Bytes.toBytes(existing)));
    }
    // Queue a put for every current group and record its table ownership.
    for (RSGroupInfo group : groupMap.values()) {
        RSGroupProtos.RSGroupInfo proto = RSGroupProtobufUtil.toProtoGroupInfo(group);
        Put put = new Put(Bytes.toBytes(group.getName()));
        put.addColumn(META_FAMILY_BYTES, META_QUALIFIER_BYTES, proto.toByteArray());
        pending.add(put);
        for (TableName owned : group.getTables()) {
            tableToGroup.put(owned, group.getName());
        }
    }
    if (!pending.isEmpty()) {
        multiMutate(pending);
    }
    return tableToGroup;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) TableName(org.apache.hadoop.hbase.TableName) RSGroupProtos(org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)

Example 14 with Delete

Use of org.apache.hadoop.hbase.client.Delete in the Apache HBase project.

The class RSGroupInfoManagerImpl, method multiMutate.

/**
 * Applies the given mutations as one atomic batch to the rsgroup table via
 * the MultiRowMutation coprocessor endpoint.
 *
 * @param mutations Put/Delete mutations to apply atomically; any other
 *          mutation type is rejected
 * @throws DoNotRetryIOException if a mutation is neither a Put nor a Delete
 * @throws IOException if the coprocessor RPC fails
 */
private void multiMutate(List<Mutation> mutations) throws IOException {
    CoprocessorRpcChannel channel = rsGroupTable.coprocessorService(ROW_KEY);
    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder = MultiRowMutationProtos.MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
        if (mutation instanceof Put) {
            mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT, mutation));
        } else if (mutation instanceof Delete) {
            mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.DELETE, mutation));
        } else {
            throw new DoNotRetryIOException("multiMutate doesn't support " + mutation.getClass().getName());
        }
    }
    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service = MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);
    try {
        service.mutateRows(null, mmrBuilder.build());
    } catch (ServiceException ex) {
        // BUG FIX: the converted IOException was previously created but never
        // thrown, so RPC failures were silently swallowed and callers believed
        // the mutation succeeded. Propagate the failure to the caller.
        throw ProtobufUtil.toIOException(ex);
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) ServiceException(com.google.protobuf.ServiceException) CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)

Example 15 with Delete

Use of org.apache.hadoop.hbase.client.Delete in the Apache HBase project.

The class TestRemoteHTableRetries, method testDelete.

/**
 * Verifies that a timed-out RemoteHTable#delete is retried: after the call
 * fails, the mocked REST client must have been invoked RETRIES times.
 */
@Test
public void testDelete() throws Exception {
    testTimedOutCall(new CallExecutor() {

        @Override
        public void run() throws Exception {
            // Each attempt issues a fresh Delete against the remote table.
            Delete deleteOp = new Delete(Bytes.toBytes("delete"));
            remoteTable.delete(deleteOp);
        }
    });
    // One underlying client call per retry attempt.
    verify(client, times(RETRIES)).delete(anyString());
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) IOException(java.io.IOException) Test(org.junit.Test)

Aggregations

Delete (org.apache.hadoop.hbase.client.Delete)306 Test (org.junit.Test)150 Put (org.apache.hadoop.hbase.client.Put)149 Result (org.apache.hadoop.hbase.client.Result)111 Table (org.apache.hadoop.hbase.client.Table)101 Scan (org.apache.hadoop.hbase.client.Scan)95 IOException (java.io.IOException)86 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)79 Cell (org.apache.hadoop.hbase.Cell)75 TableName (org.apache.hadoop.hbase.TableName)66 ArrayList (java.util.ArrayList)64 Connection (org.apache.hadoop.hbase.client.Connection)55 InterruptedIOException (java.io.InterruptedIOException)45 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)44 PrivilegedExceptionAction (java.security.PrivilegedExceptionAction)42 Get (org.apache.hadoop.hbase.client.Get)41 Mutation (org.apache.hadoop.hbase.client.Mutation)33 CellScanner (org.apache.hadoop.hbase.CellScanner)32 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)27 Admin (org.apache.hadoop.hbase.client.Admin)20