
Example 6 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestResourceFilter, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.getConfiguration().set(Constants.FILTER_CLASSES, DummyFilter.class.getName());
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(TEST_UTIL.getConfiguration());
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
}
Also used: Cluster (org.apache.hadoop.hbase.rest.client.Cluster), Client (org.apache.hadoop.hbase.rest.client.Client), BeforeClass (org.junit.BeforeClass)
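
Once the container is up, a test can check that the configured DummyFilter is actually in the request path by issuing an ordinary request through the shared client. The following is a minimal sketch only; the request path and the expected status code are illustrative assumptions, not taken from the test class above.

import org.apache.hadoop.hbase.rest.client.Response;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

@Test
public void testRequestPassesThroughFilter() throws Exception {
    // Every request routed through the servlet container also passes through DummyFilter.
    // "/version/rest" and the expected 200 are assumptions for illustration.
    Response response = client.get("/version/rest");
    assertEquals(200, response.getCode());
}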

Example 7 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestSchemaResource, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    conf.setBoolean(RESTServer.REST_CSRF_ENABLED_KEY, csrfEnabled);
    extraHdr = new BasicHeader(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    testTableSchemaModel = new TestTableSchemaModel();
    context = JAXBContext.newInstance(ColumnSchemaModel.class, TableSchemaModel.class);
}
Also used: Cluster (org.apache.hadoop.hbase.rest.client.Cluster), TestTableSchemaModel (org.apache.hadoop.hbase.rest.model.TestTableSchemaModel), TableSchemaModel (org.apache.hadoop.hbase.rest.model.TableSchemaModel), Client (org.apache.hadoop.hbase.rest.client.Client), ColumnSchemaModel (org.apache.hadoop.hbase.rest.model.ColumnSchemaModel), BasicHeader (org.apache.http.message.BasicHeader), BeforeClass (org.junit.BeforeClass)
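
With csrfEnabled set, state-changing requests are expected to be rejected unless they carry the custom header built in setUpBeforeClass. A minimal sketch of that pattern follows; the schema path, the XML payload, the expected status code, and the Header[] overload of Client.put are assumptions for illustration, not taken from the test class above.

import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.client.Response;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

@Test
public void testSchemaPutCarriesCsrfHeader() throws Exception {
    // Hypothetical path and payload; a real test would build them from a TableSchemaModel.
    String path = "/TestSchemaResource/schema";
    byte[] body = "<TableSchema name=\"TestSchemaResource\"/>".getBytes("UTF-8");
    Header[] headers = new Header[] {
        new BasicHeader("Content-Type", Constants.MIMETYPE_XML),
        // CSRF header created in setUpBeforeClass; without it the server is
        // expected to reject the PUT when CSRF protection is enabled.
        extraHdr
    };
    Response response = client.put(path, headers, body);
    assertEquals(201, response.getCode());
}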

Example 8 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestStatusResource, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    TEST_UTIL.startMiniCluster(1, 1);
    TEST_UTIL.createTable(TableName.valueOf("TestStatusResource"), Bytes.toBytes("D"));
    TEST_UTIL.createTable(TableName.valueOf("TestStatusResource2"), Bytes.toBytes("D"));
    REST_TEST_UTIL.startServletContainer(conf);
    Cluster cluster = new Cluster();
    cluster.add("localhost", REST_TEST_UTIL.getServletPort());
    client = new Client(cluster);
    context = JAXBContext.newInstance(StorageClusterStatusModel.class);
    TEST_UTIL.waitFor(6000, new Waiter.Predicate<IOException>() {

        @Override
        public boolean evaluate() throws IOException {
            return TEST_UTIL.getMiniHBaseCluster().getClusterStatus().getAverageLoad() > 0;
        }
    });
}
Also used: Cluster (org.apache.hadoop.hbase.rest.client.Cluster), StorageClusterStatusModel (org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel), IOException (java.io.IOException), Client (org.apache.hadoop.hbase.rest.client.Client), Waiter (org.apache.hadoop.hbase.Waiter), BeforeClass (org.junit.BeforeClass)
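
A typical test against this setup fetches /status/cluster and unmarshals the XML body with the JAXB context created above. The sketch below is illustrative; the assertion about live nodes is an assumption about what the status document contains, not taken from the test class above.

import java.io.ByteArrayInputStream;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.client.Response;
import org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

@Test
public void testGetClusterStatusXML() throws Exception {
    Response response = client.get("/status/cluster", Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    StorageClusterStatusModel model = (StorageClusterStatusModel)
        context.createUnmarshaller().unmarshal(new ByteArrayInputStream(response.getBody()));
    // The mini cluster started above has a single region server, so at least one
    // live node should be reported once the average-load wait has completed.
    assertTrue(model.getLiveNodes().size() >= 1);
}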

Example 9 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestTableResource, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.startMiniCluster(3);
    REST_TEST_UTIL.startServletContainer(TEST_UTIL.getConfiguration());
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    context = JAXBContext.newInstance(TableModel.class, TableInfoModel.class, TableListModel.class, TableRegionModel.class);
    Admin admin = TEST_UTIL.getAdmin();
    if (admin.tableExists(TABLE)) {
        return;
    }
    HTableDescriptor htd = new HTableDescriptor(TABLE);
    htd.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
    admin.createTable(htd);
    byte[] k = new byte[3];
    byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(COLUMN));
    List<Put> puts = new ArrayList<>();
    for (byte b1 = 'a'; b1 < 'z'; b1++) {
        for (byte b2 = 'a'; b2 < 'z'; b2++) {
            for (byte b3 = 'a'; b3 < 'z'; b3++) {
                k[0] = b1;
                k[1] = b2;
                k[2] = b3;
                Put put = new Put(k);
                put.setDurability(Durability.SKIP_WAL);
                put.addColumn(famAndQf[0], famAndQf[1], k);
                puts.add(put);
            }
        }
    }
    Connection connection = TEST_UTIL.getConnection();
    Table table = connection.getTable(TABLE);
    table.put(puts);
    table.close();
    // get the initial layout (should just be one region)
    RegionLocator regionLocator = connection.getRegionLocator(TABLE);
    List<HRegionLocation> m = regionLocator.getAllRegionLocations();
    assertEquals(1, m.size());
    // tell the master to split the table
    admin.split(TABLE);
    // give some time for the split to happen
    TestEndToEndSplitTransaction.blockUntilRegionSplit(TEST_UTIL.getConfiguration(), 60000, m.get(0).getRegionInfo().getRegionName(), true);
    long timeout = System.currentTimeMillis() + (15 * 1000);
    while (System.currentTimeMillis() < timeout && m.size() != 2) {
        try {
            Thread.sleep(250);
        } catch (InterruptedException e) {
            LOG.warn(StringUtils.stringifyException(e));
        }
        // check again
        m = regionLocator.getAllRegionLocations();
    }
    // should have two regions now
    assertEquals(2, m.size());
    regionMap = m;
    LOG.info("regions: " + regionMap);
    regionLocator.close();
}
Also used: RegionLocator (org.apache.hadoop.hbase.client.RegionLocator), TableListModel (org.apache.hadoop.hbase.rest.model.TableListModel), Table (org.apache.hadoop.hbase.client.Table), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), ArrayList (java.util.ArrayList), Connection (org.apache.hadoop.hbase.client.Connection), Cluster (org.apache.hadoop.hbase.rest.client.Cluster), Admin (org.apache.hadoop.hbase.client.Admin), Put (org.apache.hadoop.hbase.client.Put), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), TableInfoModel (org.apache.hadoop.hbase.rest.model.TableInfoModel), HRegionLocation (org.apache.hadoop.hbase.HRegionLocation), TableRegionModel (org.apache.hadoop.hbase.rest.model.TableRegionModel), Client (org.apache.hadoop.hbase.rest.client.Client), TableModel (org.apache.hadoop.hbase.rest.model.TableModel), BeforeClass (org.junit.BeforeClass)
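
Against this fixture, the table listing endpoint can be exercised by fetching the service root and unmarshalling the body as a TableListModel. The sketch below is illustrative; it assumes TABLE is a TableName and that the created table shows up in the listing, neither of which is asserted in the setup code above.

import java.io.ByteArrayInputStream;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.client.Response;
import org.apache.hadoop.hbase.rest.model.TableListModel;
import org.apache.hadoop.hbase.rest.model.TableModel;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

@Test
public void testTableListXML() throws Exception {
    // GET on the service root returns the list of user tables.
    Response response = client.get("/", Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    TableListModel model = (TableListModel)
        context.createUnmarshaller().unmarshal(new ByteArrayInputStream(response.getBody()));
    boolean found = false;
    for (TableModel table : model.getTables()) {
        found |= table.getName().equals(TABLE.getNameAsString());
    }
    assertTrue("created table should appear in the listing", found);
}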

Example 10 with Cluster

Use of org.apache.hadoop.hbase.rest.client.Cluster in project hbase by apache.

From the class TestTableScan, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    conf.set(Constants.CUSTOM_FILTERS, "CustomFilter:" + CustomFilter.class.getName());
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    Admin admin = TEST_UTIL.getAdmin();
    if (!admin.tableExists(TABLE)) {
        HTableDescriptor htd = new HTableDescriptor(TABLE);
        htd.addFamily(new HColumnDescriptor(CFA));
        htd.addFamily(new HColumnDescriptor(CFB));
        admin.createTable(htd);
        expectedRows1 = TestScannerResource.insertData(conf, TABLE, COLUMN_1, 1.0);
        expectedRows2 = TestScannerResource.insertData(conf, TABLE, COLUMN_2, 0.5);
    }
}
Also used: HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), Cluster (org.apache.hadoop.hbase.rest.client.Cluster), Client (org.apache.hadoop.hbase.rest.client.Client), Admin (org.apache.hadoop.hbase.client.Admin), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), BeforeClass (org.junit.BeforeClass)
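
With the table populated by insertData, the stateless scanner endpoint can be queried directly over REST. The sketch below is illustrative; the query parameter, the expected status code, and the use of CellSetModel for the response body are assumptions for illustration, not taken from the test class above.

import java.io.ByteArrayInputStream;
import javax.xml.bind.JAXBContext;
import org.apache.hadoop.hbase.rest.Constants;
import org.apache.hadoop.hbase.rest.client.Response;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

@Test
public void testSimpleScanXML() throws Exception {
    // Stateless scan: GET /<table>/* with optional query parameters such as limit.
    Response response = client.get("/" + TABLE + "/*?limit=10", Constants.MIMETYPE_XML);
    assertEquals(200, response.getCode());
    CellSetModel cells = (CellSetModel) JAXBContext.newInstance(CellSetModel.class)
        .createUnmarshaller().unmarshal(new ByteArrayInputStream(response.getBody()));
    // No more rows than the requested limit should come back.
    assertTrue(cells.getRows().size() <= 10);
}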

Aggregations

Client (org.apache.hadoop.hbase.rest.client.Client): 14 usages
Cluster (org.apache.hadoop.hbase.rest.client.Cluster): 14 usages
BeforeClass (org.junit.BeforeClass): 14 usages
Admin (org.apache.hadoop.hbase.client.Admin): 7 usages
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 6 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 6 usages
CellModel (org.apache.hadoop.hbase.rest.model.CellModel): 5 usages
CellSetModel (org.apache.hadoop.hbase.rest.model.CellSetModel): 5 usages
RowModel (org.apache.hadoop.hbase.rest.model.RowModel): 5 usages
ScannerModel (org.apache.hadoop.hbase.rest.model.ScannerModel): 3 usages
Put (org.apache.hadoop.hbase.client.Put): 2 usages
Table (org.apache.hadoop.hbase.client.Table): 2 usages
TableListModel (org.apache.hadoop.hbase.rest.model.TableListModel): 2 usages
BasicHeader (org.apache.http.message.BasicHeader): 2 usages
JacksonJaxbJsonProvider (org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider): 2 usages
IOException (java.io.IOException): 1 usage
ArrayList (java.util.ArrayList): 1 usage
HRegionLocation (org.apache.hadoop.hbase.HRegionLocation): 1 usage
Waiter (org.apache.hadoop.hbase.Waiter): 1 usage
Connection (org.apache.hadoop.hbase.client.Connection): 1 usage