Example 66 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

Class TestSnapshotManifest, method createRegionManifest.

private Path createRegionManifest() throws IOException {
    // A single region covering [AAAAAA, BBBBBB).
    byte[] startKey = Bytes.toBytes("AAAAAA");
    byte[] stopKey = Bytes.toBytes("BBBBBB");
    HRegionInfo regionInfo = new HRegionInfo(TABLE_NAME, startKey, stopKey, false);
    SnapshotRegionManifest.Builder dataRegionManifestBuilder = SnapshotRegionManifest.newBuilder();
    dataRegionManifestBuilder.setRegionInfo(HRegionInfo.convert(regionInfo));
    // One FamilyFiles entry per column family in the table descriptor.
    for (HColumnDescriptor hcd : builder.getTableDescriptor().getFamilies()) {
        SnapshotRegionManifest.FamilyFiles.Builder family = SnapshotRegionManifest.FamilyFiles.newBuilder();
        family.setFamilyName(UnsafeByteOperations.unsafeWrap(hcd.getName()));
        for (int j = 0; j < TEST_NUM_REGIONFILES; ++j) {
            SnapshotRegionManifest.StoreFile.Builder sfManifest = SnapshotRegionManifest.StoreFile.newBuilder();
            sfManifest.setName(String.format("%064d", j));
            sfManifest.setFileSize(j * 1024);
            family.addStoreFiles(sfManifest.build());
        }
        dataRegionManifestBuilder.addFamilyFiles(family.build());
    }
    SnapshotRegionManifest manifest = dataRegionManifestBuilder.build();
    // Region manifest files are named with the V2 manifest prefix plus the region's encoded name.
    Path regionPath = new Path(snapshotDir, SnapshotManifestV2.SNAPSHOT_MANIFEST_PREFIX + regionInfo.getEncodedName());
    FSDataOutputStream stream = fs.create(regionPath);
    try {
        manifest.writeTo(stream);
    } finally {
        stream.close();
    }
    return regionPath;
}
Also used : HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) Path(org.apache.hadoop.fs.Path) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) SnapshotRegionManifest(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream)
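A quick sanity check, not part of the original test, is to parse the manifest back from the file system. A minimal sketch, assuming the test's fs, TEST_NUM_REGIONFILES, and the regionPath returned above, plus an org.apache.hadoop.fs.FSDataInputStream import; it relies only on the parseFrom(InputStream) method that protobuf generates for every message:

try (FSDataInputStream in = fs.open(regionPath)) {
    SnapshotRegionManifest readBack = SnapshotRegionManifest.parseFrom(in);
    // Every family written above should round-trip with all of its store files.
    for (SnapshotRegionManifest.FamilyFiles family : readBack.getFamilyFilesList()) {
        assertEquals(TEST_NUM_REGIONFILES, family.getStoreFilesCount());
    }
}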

Example 67 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

Class TestSnapshotManifest, method createDataManifest.

private Path createDataManifest() throws IOException {
    SnapshotDataManifest.Builder dataManifestBuilder = SnapshotDataManifest.newBuilder();
    byte[] startKey = null;
    byte[] stopKey = null;
    // TEST_NUM_REGIONS regions tile the key space; boundaries are zero-padded 16-digit keys.
    for (int i = 1; i <= TEST_NUM_REGIONS; i++) {
        stopKey = Bytes.toBytes(String.format("%016d", i));
        HRegionInfo regionInfo = new HRegionInfo(TABLE_NAME, startKey, stopKey, false);
        SnapshotRegionManifest.Builder dataRegionManifestBuilder = SnapshotRegionManifest.newBuilder();
        for (HColumnDescriptor hcd : builder.getTableDescriptor().getFamilies()) {
            SnapshotRegionManifest.FamilyFiles.Builder family = SnapshotRegionManifest.FamilyFiles.newBuilder();
            family.setFamilyName(UnsafeByteOperations.unsafeWrap(hcd.getName()));
            for (int j = 0; j < 100; ++j) {
                SnapshotRegionManifest.StoreFile.Builder sfManifest = SnapshotRegionManifest.StoreFile.newBuilder();
                sfManifest.setName(String.format("%032d", i));
                sfManifest.setFileSize((1 + i) * (1 + i) * 1024);
                family.addStoreFiles(sfManifest.build());
            }
            dataRegionManifestBuilder.addFamilyFiles(family.build());
        }
        dataRegionManifestBuilder.setRegionInfo(HRegionInfo.convert(regionInfo));
        dataManifestBuilder.addRegionManifests(dataRegionManifestBuilder.build());
        // The next region starts where this one stopped.
        startKey = stopKey;
    }
    // The data manifest also embeds the full table schema.
    dataManifestBuilder.setTableSchema(ProtobufUtil.convertToTableSchema(builder.getTableDescriptor()));
    SnapshotDataManifest dataManifest = dataManifestBuilder.build();
    return writeDataManifest(dataManifest);
}
Also used : HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) SnapshotDataManifest(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest) SnapshotRegionManifest(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest)
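The same round-trip idea applies to the data manifest. A minimal sketch, assuming createDataManifest returns the path it wrote and that fs and TEST_NUM_REGIONS come from the surrounding test class:

Path dataManifestPath = createDataManifest();
try (FSDataInputStream in = fs.open(dataManifestPath)) {
    SnapshotDataManifest readBack = SnapshotDataManifest.parseFrom(in);
    // One SnapshotRegionManifest was added per region in the loop above.
    assertEquals(TEST_NUM_REGIONS, readBack.getRegionManifestsCount());
}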

Example 68 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

Class TestVisibilityLabelsWithDeletes, method testDeleteColumnsWithoutAndWithVisibilityLabels.

@Test
public void testDeleteColumnsWithoutAndWithVisibilityLabels() throws Exception {
    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
    Admin hBaseAdmin = TEST_UTIL.getAdmin();
    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(colDesc);
    hBaseAdmin.createTable(desc);
    try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
        Put put = new Put(row1);
        put.addColumn(fam, qual, value);
        put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
        table.put(put);
        Delete d = new Delete(row1);
        // Delete without a visibility expression: it does not match the CONFIDENTIAL cell, so the row survives.
        d.addColumns(fam, qual, HConstants.LATEST_TIMESTAMP);
        table.delete(d);
        PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    Scan s = new Scan();
                    ResultScanner scanner = table.getScanner(s);
                    Result[] next = scanner.next(3);
                    assertEquals(next.length, 1);
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(scanAction);
        d = new Delete(row1);
        // Delete with the matching CONFIDENTIAL visibility expression: the cell is now removed.
        d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
        d.addColumns(fam, qual, HConstants.LATEST_TIMESTAMP);
        table.delete(d);
        scanAction = new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    Scan s = new Scan();
                    ResultScanner scanner = table.getScanner(s);
                    Result[] next = scanner.next(3);
                    assertEquals(next.length, 0);
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(scanAction);
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) RetriesExhaustedWithDetailsException(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Result(org.apache.hadoop.hbase.client.Result) TableName(org.apache.hadoop.hbase.TableName) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)
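The CONFIDENTIAL expression only works if that label was registered with the cluster beforehand, which the test class's setup normally handles. A hedged sketch of that step, assuming the HBase 2.x VisibilityClient.addLabels(Connection, String[]) overload (org.apache.hadoop.hbase.security.visibility.VisibilityClient) and the surrounding class's SUPERUSER, conf, and label constants:

SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {

    @Override
    public Void run() throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
            // Labels are declared once, cluster-wide, before any cell may carry them.
            VisibilityClient.addLabels(connection, new String[] { CONFIDENTIAL, SECRET, PRIVATE });
        } catch (Throwable t) {
            throw new IOException(t);
        }
        return null;
    }
});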

Example 69 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

Class TestVisibilityLabelsWithDeletes, method testDeleteColumnsWithDiffColsAndTags.

@Test
public void testDeleteColumnsWithDiffColsAndTags() throws Exception {
    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
    Admin hBaseAdmin = TEST_UTIL.getAdmin();
    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
    colDesc.setMaxVersions(5);
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(colDesc);
    hBaseAdmin.createTable(desc);
    try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
        Put put = new Put(Bytes.toBytes("row1"));
        put.addColumn(fam, qual1, 125L, value);
        put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
        table.put(put);
        put = new Put(Bytes.toBytes("row1"));
        put.addColumn(fam, qual1, 126L, value);
        put.setCellVisibility(new CellVisibility(SECRET));
        table.put(put);
        TEST_UTIL.getAdmin().flush(tableName);
        PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                Delete d1 = new Delete(row1);
                d1.setCellVisibility(new CellVisibility(SECRET));
                // Note: d1 targets qualifier 'qual', not the 'qual1' written above, so it deletes nothing.
                d1.addColumns(fam, qual, 126L);
                Delete d2 = new Delete(row1);
                d2.setCellVisibility(new CellVisibility(CONFIDENTIAL));
                d2.addColumns(fam, qual1, 125L);
                try (Connection connection = ConnectionFactory.createConnection(conf);
                    Table table = connection.getTable(tableName)) {
                    table.delete(createList(d1, d2));
                } catch (Throwable t) {
                    throw new IOException(t);
                }
                return null;
            }
        };
        SUPERUSER.runAs(actiona);
        Scan s = new Scan();
        s.setMaxVersions(5);
        s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
        ResultScanner scanner = table.getScanner(s);
        Result[] next = scanner.next(3);
        assertEquals(next.length, 1);
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Result(org.apache.hadoop.hbase.client.Result) TableName(org.apache.hadoop.hbase.TableName) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)
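createList is a private helper of TestVisibilityLabelsWithDeletes and is not shown in this excerpt. A minimal equivalent, offered only as an assumption about its shape (it needs java.util.List, java.util.ArrayList, and java.util.Arrays):

@SafeVarargs
private static <E> List<E> createList(E... elements) {
    // Return a mutable list: Table.delete(List<Delete>) has historically removed succeeded Deletes from its argument.
    return new ArrayList<>(Arrays.asList(elements));
}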

Example 70 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

Class TestVisibilityLabelsWithDeletes, method testDeleteWithNoVisibilitiesForPutsAndDeletes.

@Test
public void testDeleteWithNoVisibilitiesForPutsAndDeletes() throws Exception {
    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
    Admin hBaseAdmin = TEST_UTIL.getAdmin();
    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
    colDesc.setMaxVersions(5);
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(colDesc);
    hBaseAdmin.createTable(desc);
    Put p = new Put(Bytes.toBytes("row1"));
    p.addColumn(fam, qual, value);
    try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
        table.put(p);
        p = new Put(Bytes.toBytes("row1"));
        p.addColumn(fam, qual1, value);
        table.put(p);
        p = new Put(Bytes.toBytes("row2"));
        p.addColumn(fam, qual, value);
        table.put(p);
        p = new Put(Bytes.toBytes("row2"));
        p.addColumn(fam, qual1, value);
        table.put(p);
        // Neither the Puts nor the Delete carry visibility expressions, so the Delete removes all of row1.
        Delete d = new Delete(Bytes.toBytes("row1"));
        table.delete(d);
        Get g = new Get(Bytes.toBytes("row1"));
        g.setMaxVersions();
        g.setAuthorizations(new Authorizations(SECRET, PRIVATE));
        Result result = table.get(g);
        assertEquals(0, result.rawCells().length);
        // A fresh Put after the Delete is visible again.
        p = new Put(Bytes.toBytes("row1"));
        p.addColumn(fam, qual, value);
        table.put(p);
        result = table.get(g);
        assertEquals(1, result.rawCells().length);
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Get(org.apache.hadoop.hbase.client.Get) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
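row2 never received a Delete, so both of its cells should remain readable. A hedged follow-up, not in the original test, that would sit inside the same try block:

Get g2 = new Get(Bytes.toBytes("row2"));
g2.setMaxVersions();
g2.setAuthorizations(new Authorizations(SECRET, PRIVATE));
Result r2 = table.get(g2);
// row2 received two Puts (qual and qual1) and no Delete, so both cells survive.
assertEquals(2, r2.rawCells().length);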

Aggregations

HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 679 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 561 usages
Test (org.junit.Test): 358 usages
TableName (org.apache.hadoop.hbase.TableName): 200 usages
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 137 usages
Put (org.apache.hadoop.hbase.client.Put): 132 usages
Table (org.apache.hadoop.hbase.client.Table): 118 usages
IOException (java.io.IOException): 112 usages
Admin (org.apache.hadoop.hbase.client.Admin): 112 usages
Path (org.apache.hadoop.fs.Path): 81 usages
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 74 usages
ArrayList (java.util.ArrayList): 66 usages
Configuration (org.apache.hadoop.conf.Configuration): 65 usages
Connection (org.apache.hadoop.hbase.client.Connection): 52 usages
Scan (org.apache.hadoop.hbase.client.Scan): 50 usages
Result (org.apache.hadoop.hbase.client.Result): 45 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 44 usages
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 42 usages
Connection (java.sql.Connection): 41 usages
Properties (java.util.Properties): 38 usages