
Example 6 with ColumnDescriptor

use of org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor in project hbase by apache.

the class ThriftUtilities method colDescFromHbase.

/**
 * This utility method creates a new Thrift ColumnDescriptor "struct" based on
 * an HBase ColumnFamilyDescriptor object.
 *
 * @param in
 *          HBase ColumnFamilyDescriptor object
 * @return Thrift ColumnDescriptor
 */
public static ColumnDescriptor colDescFromHbase(ColumnFamilyDescriptor in) {
    ColumnDescriptor col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY));
    col.maxVersions = in.getMaxVersions();
    col.compression = in.getCompressionType().toString();
    col.inMemory = in.isInMemory();
    col.blockCacheEnabled = in.isBlockCacheEnabled();
    col.bloomFilterType = in.getBloomFilterType().toString();
    col.timeToLive = in.getTimeToLive();
    return col;
}
Also used: ColumnDescriptor (org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor)
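
For context, a minimal sketch of how this conversion might be invoked from application code, assuming an HBase 2.x client on the classpath and that ThriftUtilities is accessible from your code; the family name "cf" and the version setting are purely illustrative:

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.thrift.ThriftUtilities;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class ColDescConversionSketch {
    public static void main(String[] args) {
        // Build a client-side column family descriptor (family name is illustrative).
        ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder
                .newBuilder(Bytes.toBytes("cf"))
                .setMaxVersions(3)
                .build();

        // Convert to the Thrift struct; the method above appends the family delimiter to the name.
        ColumnDescriptor col = ThriftUtilities.colDescFromHbase(cfd);
        ByteBuffer name = col.name;
        System.out.println(Bytes.toString(Bytes.getBytes(name)) + " maxVersions=" + col.maxVersions);
    }
}

Because KeyValue.COLUMN_FAMILY_DELIM_ARRAY is appended in colDescFromHbase, the printed name for family "cf" would be "cf:".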

Example 7 with ColumnDescriptor

use of org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor in project hbase by apache.

the class ThriftHBaseServiceHandler method getColumnDescriptors.

@Override
public Map<ByteBuffer, ColumnDescriptor> getColumnDescriptors(ByteBuffer tableName) throws IOError, TException {
    Table table = null;
    try {
        TreeMap<ByteBuffer, ColumnDescriptor> columns = new TreeMap<>();
        table = getTable(tableName);
        TableDescriptor desc = table.getDescriptor();
        for (ColumnFamilyDescriptor e : desc.getColumnFamilies()) {
            ColumnDescriptor col = ThriftUtilities.colDescFromHbase(e);
            columns.put(col.name, col);
        }
        return columns;
    } catch (IOException e) {
        LOG.warn(e.getMessage(), e);
        throw getIOError(e);
    } finally {
        closeTable(table);
    }
}
Also used: Table (org.apache.hadoop.hbase.client.Table), ColumnDescriptor (org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), IOException (java.io.IOException), TreeMap (java.util.TreeMap), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), ByteBuffer (java.nio.ByteBuffer), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)
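
On the wire, a remote application reaches this handler through the generated Hbase.Client stub, as the DemoClient in Example 10 does. The following minimal sketch assumes a Thrift1 gateway listening on localhost:9090 and a table named "demo_table"; host, port, and table name are assumptions for illustration only:

import java.nio.ByteBuffer;
import java.util.Map;

import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class ListColumnFamiliesSketch {
    public static void main(String[] args) throws Exception {
        // Connect to the Thrift gateway (endpoint is an assumption for this sketch).
        TTransport transport = new TSocket("localhost", 9090);
        transport.open();
        TProtocol protocol = new TBinaryProtocol(transport, true, true);
        Hbase.Client client = new Hbase.Client(protocol);

        // This call is served by the getColumnDescriptors() handler shown above.
        ByteBuffer tableName = ByteBuffer.wrap("demo_table".getBytes("UTF-8"));
        Map<ByteBuffer, ColumnDescriptor> families = client.getColumnDescriptors(tableName);
        for (ColumnDescriptor col : families.values()) {
            System.out.println(new String(col.name.array(), "UTF-8") + " maxVersions=" + col.maxVersions);
        }

        transport.close();
    }
}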

Example 8 with ColumnDescriptor

use of org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor in project hbase by apache.

the class ThriftUtilities method colDescFromHbase.

/**
 * This utility method creates a new Thrift ColumnDescriptor "struct" based on
 * an HBase HColumnDescriptor object.
 *
 * @param in
 *          HBase HColumnDescriptor object
 * @return Thrift ColumnDescriptor
 */
public static ColumnDescriptor colDescFromHbase(HColumnDescriptor in) {
    ColumnDescriptor col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY));
    col.maxVersions = in.getMaxVersions();
    col.compression = in.getCompressionType().toString();
    col.inMemory = in.isInMemory();
    col.blockCacheEnabled = in.isBlockCacheEnabled();
    col.bloomFilterType = in.getBloomFilterType().toString();
    col.timeToLive = in.getTimeToLive();
    return col;
}
Also used: HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), ColumnDescriptor (org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor)
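
This overload takes the older HColumnDescriptor type. A minimal sketch of that legacy call path, assuming the (since-deprecated) HColumnDescriptor API is still available on the classpath; the family name and version count are illustrative:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.thrift.ThriftUtilities;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class LegacyColDescSketch {
    public static void main(String[] args) {
        // HColumnDescriptor is the pre-2.0 column family API; deprecated but still usable in 2.x.
        HColumnDescriptor hcd = new HColumnDescriptor("cf");
        hcd.setMaxVersions(5);

        ColumnDescriptor col = ThriftUtilities.colDescFromHbase(hcd);
        // The Thrift name carries the trailing family delimiter, e.g. "cf:".
        System.out.println(Bytes.toString(Bytes.getBytes(col.name)) + " maxVersions=" + col.maxVersions);
    }
}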

Example 9 with ColumnDescriptor

use of org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor in project whirr by apache.

the class HBaseOldServiceTest method test.

@Test(timeout = TestConstants.ITEST_TIMEOUT)
public void test() throws Exception {
    ArrayList<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>();
    ColumnDescriptor cd = new ColumnDescriptor();
    cd.name = FAMILY1;
    columns.add(cd);
    cd = new ColumnDescriptor();
    cd.name = FAMILY2;
    columns.add(cd);
    Hbase.Client client = controller.getThriftClient();
    client.createTable(TABLE, columns);
    ArrayList<Mutation> mutations = new ArrayList<Mutation>();
    mutations.add(new Mutation(false, COLUMN, VALUE));
    client.mutateRow(TABLE, ROW, mutations);
    int scan1 = client.scannerOpen(TABLE, FIRST, Lists.newArrayList(FAMILY1));
    List<TRowResult> rows = client.scannerGet(scan1);
    assertThat(rows.size(), is(1));
    assertThat(Bytes.toString(rows.get(0).getRow()), is("testRow"));
    assertTrue("No more rows", client.scannerGet(scan1).isEmpty());
    client.scannerClose(scan1);
    int scan2 = client.scannerOpen(TABLE, FIRST, Lists.newArrayList(FAMILY2));
    assertTrue("No more rows", client.scannerGet(scan2).isEmpty());
    client.scannerClose(scan2);
}
Also used: ColumnDescriptor (org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor), ArrayList (java.util.ArrayList), Mutation (org.apache.hadoop.hbase.thrift.generated.Mutation), TRowResult (org.apache.hadoop.hbase.thrift.generated.TRowResult), Hbase (org.apache.hadoop.hbase.thrift.generated.Hbase), Test (org.junit.Test)

Example 10 with ColumnDescriptor

use of org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor in project hbase by apache.

the class DemoClient method run.

private void run() throws Exception {
    TTransport transport = new TSocket(host, port);
    if (secure) {
        Map<String, String> saslProperties = new HashMap<>();
        saslProperties.put(Sasl.QOP, "auth-conf,auth-int,auth");
        /*
         * The Thrift server the DemoClient is trying to connect to
         * must have a matching principal and support authentication.
         *
         * The HBase cluster must be secure and allow proxy users.
         */
        transport = new TSaslClientTransport("GSSAPI", null,
            // Thrift server user name, should be an authorized proxy user.
            serverPrincipal,
            // Thrift server domain
            host,
            saslProperties, null, transport);
    }
    transport.open();
    TProtocol protocol = new TBinaryProtocol(transport, true, true);
    Hbase.Client client = new Hbase.Client(protocol);
    ByteBuffer demoTable = ByteBuffer.wrap(bytes("demo_table"));
    ByteBuffer disabledTable = ByteBuffer.wrap(bytes("disabled_table"));
    // Scan all tables, look for the demo table and delete it.
    System.out.println("scanning tables...");
    for (ByteBuffer name : client.getTableNames()) {
        System.out.println("  found: " + ClientUtils.utf8(name.array()));
        if (name.equals(demoTable) || name.equals(disabledTable)) {
            if (client.isTableEnabled(name)) {
                System.out.println("    disabling table: " + ClientUtils.utf8(name.array()));
                client.disableTable(name);
            }
            System.out.println("    deleting table: " + ClientUtils.utf8(name.array()));
            client.deleteTable(name);
        }
    }
    // Create the demo table with two column families, entry: and unused:
    ArrayList<ColumnDescriptor> columns = new ArrayList<>(2);
    ColumnDescriptor col;
    col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(bytes("entry:"));
    col.timeToLive = Integer.MAX_VALUE;
    col.maxVersions = 10;
    columns.add(col);
    col = new ColumnDescriptor();
    col.name = ByteBuffer.wrap(bytes("unused:"));
    col.timeToLive = Integer.MAX_VALUE;
    columns.add(col);
    System.out.println("creating table: " + ClientUtils.utf8(demoTable.array()));
    try {
        client.createTable(demoTable, columns);
        client.createTable(disabledTable, columns);
    } catch (AlreadyExists ae) {
        System.out.println("WARN: " + ae.message);
    }
    System.out.println("column families in " + ClientUtils.utf8(demoTable.array()) + ": ");
    Map<ByteBuffer, ColumnDescriptor> columnMap = client.getColumnDescriptors(demoTable);
    for (ColumnDescriptor col2 : columnMap.values()) {
        System.out.println("  column: " + ClientUtils.utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
    }
    if (client.isTableEnabled(disabledTable)) {
        System.out.println("disabling table: " + ClientUtils.utf8(disabledTable.array()));
        client.disableTable(disabledTable);
    }
    System.out.println("list tables with enabled statuses : ");
    Map<ByteBuffer, Boolean> statusMap = client.getTableNamesWithIsTableEnabled();
    for (Map.Entry<ByteBuffer, Boolean> entry : statusMap.entrySet()) {
        System.out.println(" Table: " + ClientUtils.utf8(entry.getKey().array()) + ", is enabled: " + entry.getValue());
    }
    Map<ByteBuffer, ByteBuffer> dummyAttributes = null;
    boolean writeToWal = false;
    // Test UTF-8 handling
    byte[] invalid = { (byte) 'f', (byte) 'o', (byte) 'o', (byte) '-', (byte) 0xfc, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1 };
    byte[] valid = { (byte) 'f', (byte) 'o', (byte) 'o', (byte) '-', (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83, (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3, (byte) 0x83, (byte) 0xAB };
    ArrayList<Mutation> mutations;
    // non-utf8 is fine for data
    mutations = new ArrayList<>(1);
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(invalid), writeToWal));
    client.mutateRow(demoTable, ByteBuffer.wrap(bytes("foo")), mutations, dummyAttributes);
    // this row name is valid utf8
    mutations = new ArrayList<>(1);
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(valid), writeToWal));
    client.mutateRow(demoTable, ByteBuffer.wrap(valid), mutations, dummyAttributes);
    // non-utf8 is now allowed in row names because HBase stores values as binary
    mutations = new ArrayList<>(1);
    mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(invalid), writeToWal));
    client.mutateRow(demoTable, ByteBuffer.wrap(invalid), mutations, dummyAttributes);
    // Run a scanner on the rows we just created
    ArrayList<ByteBuffer> columnNames = new ArrayList<>();
    columnNames.add(ByteBuffer.wrap(bytes("entry:")));
    System.out.println("Starting scanner...");
    int scanner = client.scannerOpen(demoTable, ByteBuffer.wrap(bytes("")), columnNames, dummyAttributes);
    while (true) {
        List<TRowResult> entry = client.scannerGet(scanner);
        if (entry.isEmpty()) {
            break;
        }
        printRow(entry);
    }
    // Run some operations on a bunch of rows
    for (int i = 100; i >= 0; --i) {
        // format row keys as "00000" to "00100"
        NumberFormat nf = NumberFormat.getInstance();
        nf.setMinimumIntegerDigits(5);
        nf.setGroupingUsed(false);
        byte[] row = bytes(nf.format(i));
        mutations = new ArrayList<>(1);
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("unused:")), ByteBuffer.wrap(bytes("DELETE_ME")), writeToWal));
        client.mutateRow(demoTable, ByteBuffer.wrap(row), mutations, dummyAttributes);
        printRow(client.getRow(demoTable, ByteBuffer.wrap(row), dummyAttributes));
        client.deleteAllRow(demoTable, ByteBuffer.wrap(row), dummyAttributes);
        // sleep to force later timestamp
        try {
            Thread.sleep(50);
        } catch (InterruptedException e) {
            // no-op
        }
        mutations = new ArrayList<>(2);
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:num")), ByteBuffer.wrap(bytes("0")), writeToWal));
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(bytes("FOO")), writeToWal));
        client.mutateRow(demoTable, ByteBuffer.wrap(row), mutations, dummyAttributes);
        printRow(client.getRow(demoTable, ByteBuffer.wrap(row), dummyAttributes));
        Mutation m;
        mutations = new ArrayList<>(2);
        m = new Mutation();
        m.column = ByteBuffer.wrap(bytes("entry:foo"));
        m.isDelete = true;
        mutations.add(m);
        m = new Mutation();
        m.column = ByteBuffer.wrap(bytes("entry:num"));
        m.value = ByteBuffer.wrap(bytes("-1"));
        mutations.add(m);
        client.mutateRow(demoTable, ByteBuffer.wrap(row), mutations, dummyAttributes);
        printRow(client.getRow(demoTable, ByteBuffer.wrap(row), dummyAttributes));
        mutations = new ArrayList<>();
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:num")), ByteBuffer.wrap(bytes(Integer.toString(i))), writeToWal));
        mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:sqr")), ByteBuffer.wrap(bytes(Integer.toString(i * i))), writeToWal));
        client.mutateRow(demoTable, ByteBuffer.wrap(row), mutations, dummyAttributes);
        printRow(client.getRow(demoTable, ByteBuffer.wrap(row), dummyAttributes));
        // sleep to force later timestamp
        try {
            Thread.sleep(50);
        } catch (InterruptedException e) {
            // no-op
        }
        mutations.clear();
        m = new Mutation();
        m.column = ByteBuffer.wrap(bytes("entry:num"));
        m.value = ByteBuffer.wrap(bytes("-999"));
        mutations.add(m);
        m = new Mutation();
        m.column = ByteBuffer.wrap(bytes("entry:sqr"));
        m.isDelete = true;
        // a timestamp of 1 shouldn't override the latest value
        client.mutateRowTs(demoTable, ByteBuffer.wrap(row), mutations, 1, dummyAttributes);
        printRow(client.getRow(demoTable, ByteBuffer.wrap(row), dummyAttributes));
        List<TCell> versions = client.getVer(demoTable, ByteBuffer.wrap(row), ByteBuffer.wrap(bytes("entry:num")), 10, dummyAttributes);
        printVersions(ByteBuffer.wrap(row), versions);
        if (versions.isEmpty()) {
            System.out.println("FATAL: wrong # of versions");
            System.exit(-1);
        }
        List<TCell> result = client.get(demoTable, ByteBuffer.wrap(row), ByteBuffer.wrap(bytes("entry:foo")), dummyAttributes);
        if (!result.isEmpty()) {
            System.out.println("FATAL: shouldn't get here");
            System.exit(-1);
        }
        System.out.println("");
    }
    // scan all rows/columnNames
    columnNames.clear();
    for (ColumnDescriptor col2 : client.getColumnDescriptors(demoTable).values()) {
        System.out.println("column with name: " + new String(col2.name.array()));
        System.out.println(col2.toString());
        columnNames.add(col2.name);
    }
    System.out.println("Starting scanner...");
    scanner = client.scannerOpenWithStop(demoTable, ByteBuffer.wrap(bytes("00020")), ByteBuffer.wrap(bytes("00040")), columnNames, dummyAttributes);
    while (true) {
        List<TRowResult> entry = client.scannerGet(scanner);
        if (entry.isEmpty()) {
            System.out.println("Scanner finished");
            break;
        }
        printRow(entry);
    }
    transport.close();
}
Also used: HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), TSaslClientTransport (org.apache.thrift.transport.TSaslClientTransport), TProtocol (org.apache.thrift.protocol.TProtocol), TSocket (org.apache.thrift.transport.TSocket), ColumnDescriptor (org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor), TCell (org.apache.hadoop.hbase.thrift.generated.TCell), ByteBuffer (java.nio.ByteBuffer), TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol), TTransport (org.apache.thrift.transport.TTransport), Mutation (org.apache.hadoop.hbase.thrift.generated.Mutation), AlreadyExists (org.apache.hadoop.hbase.thrift.generated.AlreadyExists), TRowResult (org.apache.hadoop.hbase.thrift.generated.TRowResult), Hbase (org.apache.hadoop.hbase.thrift.generated.Hbase), Map (java.util.Map), NumberFormat (java.text.NumberFormat)

Aggregations

ColumnDescriptor (org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor): 11 uses
ArrayList (java.util.ArrayList): 7 uses
Hbase (org.apache.hadoop.hbase.thrift.generated.Hbase): 6 uses
Mutation (org.apache.hadoop.hbase.thrift.generated.Mutation): 5 uses
TRowResult (org.apache.hadoop.hbase.thrift.generated.TRowResult): 5 uses
Test (org.junit.Test): 4 uses
ByteBuffer (java.nio.ByteBuffer): 3 uses
AlreadyExists (org.apache.hadoop.hbase.thrift.generated.AlreadyExists): 3 uses
IOException (java.io.IOException): 2 uses
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 2 uses
TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol): 2 uses
TProtocol (org.apache.thrift.protocol.TProtocol): 2 uses
TSocket (org.apache.thrift.transport.TSocket): 2 uses
TTransport (org.apache.thrift.transport.TTransport): 2 uses
NumberFormat (java.text.NumberFormat): 1 use
HashMap (java.util.HashMap): 1 use
Map (java.util.Map): 1 use
TreeMap (java.util.TreeMap): 1 use
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 1 use
TableName (org.apache.hadoop.hbase.TableName): 1 use