
Example 1 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestVersionResource, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(TEST_UTIL.getConfiguration());
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    context = JAXBContext.newInstance(VersionModel.class, StorageClusterVersionModel.class);
}
Also used: Cluster (org.apache.hadoop.hbase.rest.client.Cluster), Client (org.apache.hadoop.hbase.rest.client.Client), StorageClusterVersionModel (org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel), VersionModel (org.apache.hadoop.hbase.rest.model.VersionModel), BeforeClass (org.junit.BeforeClass)
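
For orientation, a minimal follow-on sketch (not taken from the project) of how the client and JAXBContext configured above are typically used: fetch the /version document as XML and unmarshal it into a VersionModel. It assumes Response and Constants from the org.apache.hadoop.hbase.rest packages, java.io.ByteArrayInputStream, JUnit's @Test and static asserts; the method name and the getRESTVersion() accessor are illustrative and may differ across HBase versions.

@Test
public void exampleFetchRestVersion() throws Exception {
    // Ask the REST server for its version document in XML form.
    Response response = client.get("/version", Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    // Unmarshal the body with the JAXBContext built in setUpBeforeClass().
    VersionModel model = (VersionModel) context.createUnmarshaller()
        .unmarshal(new ByteArrayInputStream(response.getBody()));
    // Accessor assumed from the model class; adjust to the HBase version in use.
    assertNotNull(model.getRESTVersion());
}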

Example 2 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestScannersWithFilters, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.startMiniCluster(3);
    REST_TEST_UTIL.startServletContainer(TEST_UTIL.getConfiguration());
    context = JAXBContext.newInstance(CellModel.class, CellSetModel.class, RowModel.class, ScannerModel.class);
    marshaller = context.createMarshaller();
    unmarshaller = context.createUnmarshaller();
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    Admin admin = TEST_UTIL.getAdmin();
    if (!admin.tableExists(TABLE)) {
        HTableDescriptor htd = new HTableDescriptor(TABLE);
        htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
        htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
        admin.createTable(htd);
        Table table = TEST_UTIL.getConnection().getTable(TABLE);
        // Insert first half
        for (byte[] ROW : ROWS_ONE) {
            Put p = new Put(ROW);
            p.setDurability(Durability.SKIP_WAL);
            for (byte[] QUALIFIER : QUALIFIERS_ONE) {
                p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
            }
            table.put(p);
        }
        for (byte[] ROW : ROWS_TWO) {
            Put p = new Put(ROW);
            p.setDurability(Durability.SKIP_WAL);
            for (byte[] QUALIFIER : QUALIFIERS_TWO) {
                p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
            }
            table.put(p);
        }
        // Insert second half (reverse families)
        for (byte[] ROW : ROWS_ONE) {
            Put p = new Put(ROW);
            p.setDurability(Durability.SKIP_WAL);
            for (byte[] QUALIFIER : QUALIFIERS_ONE) {
                p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
            }
            table.put(p);
        }
        for (byte[] ROW : ROWS_TWO) {
            Put p = new Put(ROW);
            p.setDurability(Durability.SKIP_WAL);
            for (byte[] QUALIFIER : QUALIFIERS_TWO) {
                p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
            }
            table.put(p);
        }
        // Delete the second qualifier from all rows and families
        for (byte[] ROW : ROWS_ONE) {
            Delete d = new Delete(ROW);
            d.addColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
            d.addColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
            table.delete(d);
        }
        for (byte[] ROW : ROWS_TWO) {
            Delete d = new Delete(ROW);
            d.addColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
            d.addColumns(FAMILIES[1], QUALIFIERS_TWO[1]);
            table.delete(d);
        }
        colsPerRow -= 2;
        // Delete the second rows from both groups, one column at a time
        for (byte[] QUALIFIER : QUALIFIERS_ONE) {
            Delete d = new Delete(ROWS_ONE[1]);
            d.addColumns(FAMILIES[0], QUALIFIER);
            d.addColumns(FAMILIES[1], QUALIFIER);
            table.delete(d);
        }
        for (byte[] QUALIFIER : QUALIFIERS_TWO) {
            Delete d = new Delete(ROWS_TWO[1]);
            d.addColumns(FAMILIES[0], QUALIFIER);
            d.addColumns(FAMILIES[1], QUALIFIER);
            table.delete(d);
        }
        numRows -= 2;
        table.close();
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), Table (org.apache.hadoop.hbase.client.Table), CellSetModel (org.apache.hadoop.hbase.rest.model.CellSetModel), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), Cluster (org.apache.hadoop.hbase.rest.client.Cluster), Admin (org.apache.hadoop.hbase.client.Admin), Put (org.apache.hadoop.hbase.client.Put), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), CellModel (org.apache.hadoop.hbase.rest.model.CellModel), RowModel (org.apache.hadoop.hbase.rest.model.RowModel), Client (org.apache.hadoop.hbase.rest.client.Client), ScannerModel (org.apache.hadoop.hbase.rest.model.ScannerModel), BeforeClass (org.junit.BeforeClass)
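
A hedged sketch of how this setup is usually exercised (illustrative, not copied from the test class): a filtered Scan is converted to a ScannerModel, marshalled to XML, PUT to the table's scanner resource, and the returned scanner URI is read and then released. It assumes Scan, PrefixFilter, Bytes, Response, Constants, StringWriter and ByteArrayInputStream in addition to the imports listed above, plus JUnit's @Test and static asserts; the paths follow the standard REST scanner resource.

@Test
public void exampleFilteredScanOverRest() throws Exception {
    // Build a Scan with a simple row-prefix filter and convert it to the REST model.
    Scan scan = new Scan();
    scan.setFilter(new PrefixFilter(ROWS_ONE[0]));
    ScannerModel model = ScannerModel.fromScan(scan);
    StringWriter writer = new StringWriter();
    marshaller.marshal(model, writer);
    // Create the scanner; the Location header of the 201 response addresses it.
    Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML,
        Bytes.toBytes(writer.toString()));
    assertEquals(201, response.getCode());
    String scannerURI = response.getLocation();
    // Fetch one batch of cells and unmarshal it with the unmarshaller created above.
    response = client.get(scannerURI, Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    CellSetModel cells = (CellSetModel) unmarshaller.unmarshal(
        new ByteArrayInputStream(response.getBody()));
    assertTrue(cells.getRows().size() > 0);
    // Release the server-side scanner.
    client.delete(scannerURI);
}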

Example 3 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestMultiRowResource, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    // Enable or disable CSRF protection on the REST server for this run.
    conf.setBoolean(RESTServer.REST_CSRF_ENABLED_KEY, csrfEnabled);
    // Custom header that write requests must carry when CSRF protection is enabled.
    extraHdr = new BasicHeader(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    context = JAXBContext.newInstance(CellModel.class, CellSetModel.class, RowModel.class);
    marshaller = context.createMarshaller();
    unmarshaller = context.createUnmarshaller();
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    Admin admin = TEST_UTIL.getAdmin();
    if (admin.tableExists(TABLE)) {
        return;
    }
    HTableDescriptor htd = new HTableDescriptor(TABLE);
    htd.addFamily(new HColumnDescriptor(CFA));
    htd.addFamily(new HColumnDescriptor(CFB));
    admin.createTable(htd);
}
Also used: CellSetModel (org.apache.hadoop.hbase.rest.model.CellSetModel), Cluster (org.apache.hadoop.hbase.rest.client.Cluster), CellModel (org.apache.hadoop.hbase.rest.model.CellModel), RowModel (org.apache.hadoop.hbase.rest.model.RowModel), Client (org.apache.hadoop.hbase.rest.client.Client), Admin (org.apache.hadoop.hbase.client.Admin), BasicHeader (org.apache.http.message.BasicHeader), BeforeClass (org.junit.BeforeClass)
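
A hedged usage sketch (illustrative only): write one cell per family over REST, passing extraHdr so the writes succeed when CSRF protection is enabled, then read both rows back in a single call through the multiget resource. The row keys and qualifiers are made up, CFA/CFB are assumed to be the string family names from the table definition above, and the Client put overload that accepts an extra Header is assumed to exist in this HBase version; Bytes, Response, Constants, ByteArrayInputStream and JUnit's @Test and asserts are assumed imports.

@Test
public void exampleMultiGet() throws Exception {
    // Store one cell in each family; the extra header satisfies the CSRF filter.
    Response response = client.put("/" + TABLE + "/testrow1/" + CFA + ":q1",
        Constants.MIMETYPE_BINARY, Bytes.toBytes("value1"), extraHdr);
    assertEquals(200, response.getCode());
    response = client.put("/" + TABLE + "/testrow2/" + CFB + ":q2",
        Constants.MIMETYPE_BINARY, Bytes.toBytes("value2"), extraHdr);
    assertEquals(200, response.getCode());
    // Read both rows back in a single request to the multiget resource.
    response = client.get("/" + TABLE + "/multiget?row=testrow1&row=testrow2",
        Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    CellSetModel cellSet = (CellSetModel) unmarshaller.unmarshal(
        new ByteArrayInputStream(response.getBody()));
    assertEquals(2, cellSet.getRows().size());
}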

Example 4 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestNamespacesInstanceResource, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    testNamespacesInstanceModel = new TestNamespacesInstanceModel();
    context = JAXBContext.newInstance(NamespacesInstanceModel.class, TableListModel.class);
    jsonMapper = new JacksonJaxbJsonProvider().locateMapper(NamespacesInstanceModel.class, MediaType.APPLICATION_JSON_TYPE);
    // Seed property maps for the test namespaces.
    NAMESPACE1_PROPS.put("key1", "value1");
    NAMESPACE2_PROPS.put("key2a", "value2a");
    NAMESPACE2_PROPS.put("key2b", "value2b");
    NAMESPACE3_PROPS.put("key3", "value3");
    NAMESPACE4_PROPS.put("key4a", "value4a");
    NAMESPACE4_PROPS.put("key4b", "value4b");
}
Also used: TestNamespacesInstanceModel (org.apache.hadoop.hbase.rest.model.TestNamespacesInstanceModel), NamespacesInstanceModel (org.apache.hadoop.hbase.rest.model.NamespacesInstanceModel), TableListModel (org.apache.hadoop.hbase.rest.model.TableListModel), Cluster (org.apache.hadoop.hbase.rest.client.Cluster), JacksonJaxbJsonProvider (org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider), Client (org.apache.hadoop.hbase.rest.client.Client), BeforeClass (org.junit.BeforeClass)
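
A hedged sketch of how this client is pointed at the namespaces resource (illustrative only): fetch the built-in default namespace and unmarshal it with the models registered above. Creating or altering a namespace follows the same pattern, with a marshalled NamespacesInstanceModel POSTed or PUT to /namespaces/{name}. Response, Constants, ByteArrayInputStream and JUnit's @Test and asserts are assumed imports.

@Test
public void exampleGetDefaultNamespace() throws Exception {
    // The "default" namespace always exists, so this GET should succeed.
    Response response = client.get("/namespaces/default", Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    NamespacesInstanceModel model = (NamespacesInstanceModel) context.createUnmarshaller()
        .unmarshal(new ByteArrayInputStream(response.getBody()));
    assertNotNull(model);
}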

Example 5 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestNamespacesResource, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    testNamespacesModel = new TestNamespacesModel();
    context = JAXBContext.newInstance(NamespacesModel.class);
}
Also used: NamespacesModel (org.apache.hadoop.hbase.rest.model.NamespacesModel), TestNamespacesModel (org.apache.hadoop.hbase.rest.model.TestNamespacesModel), Cluster (org.apache.hadoop.hbase.rest.client.Cluster), Client (org.apache.hadoop.hbase.rest.client.Client), BeforeClass (org.junit.BeforeClass)
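
A hedged sketch (illustrative only): list all namespaces and unmarshal the response into the NamespacesModel registered above. Response, Constants, ByteArrayInputStream and JUnit's @Test and asserts are assumed imports, and the getNamespaces() accessor is assumed from the model class.

@Test
public void exampleListNamespaces() throws Exception {
    Response response = client.get("/namespaces", Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    NamespacesModel model = (NamespacesModel) context.createUnmarshaller()
        .unmarshal(new ByteArrayInputStream(response.getBody()));
    // Every cluster has at least the built-in "default" and "hbase" namespaces.
    assertTrue(model.getNamespaces().contains("default"));
}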

Aggregations

Client (org.apache.hadoop.hbase.rest.client.Client): 14
Cluster (org.apache.hadoop.hbase.rest.client.Cluster): 14
BeforeClass (org.junit.BeforeClass): 14
Admin (org.apache.hadoop.hbase.client.Admin): 7
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 6
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 6
CellModel (org.apache.hadoop.hbase.rest.model.CellModel): 5
CellSetModel (org.apache.hadoop.hbase.rest.model.CellSetModel): 5
RowModel (org.apache.hadoop.hbase.rest.model.RowModel): 5
ScannerModel (org.apache.hadoop.hbase.rest.model.ScannerModel): 3
Put (org.apache.hadoop.hbase.client.Put): 2
Table (org.apache.hadoop.hbase.client.Table): 2
TableListModel (org.apache.hadoop.hbase.rest.model.TableListModel): 2
BasicHeader (org.apache.http.message.BasicHeader): 2
JacksonJaxbJsonProvider (org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider): 2
IOException (java.io.IOException): 1
ArrayList (java.util.ArrayList): 1
HRegionLocation (org.apache.hadoop.hbase.HRegionLocation): 1
Waiter (org.apache.hadoop.hbase.Waiter): 1
Connection (org.apache.hadoop.hbase.client.Connection): 1