
Example 11 with Connection

Use of org.apache.hadoop.hbase.client.Connection in project hadoop by apache.

From the class TestHBaseStorageFlowActivity, method checkFlowActivityTable:

private void checkFlowActivityTable(String cluster, String user, String flow, String flowVersion, long runid, Configuration c1, long appCreatedTime) throws IOException {
    Scan s = new Scan();
    s.addFamily(FlowActivityColumnFamily.INFO.getBytes());
    byte[] startRow = new FlowActivityRowKey(cluster, appCreatedTime, user, flow).getRowKey();
    s.setStartRow(startRow);
    String clusterStop = cluster + "1";
    byte[] stopRow = new FlowActivityRowKey(clusterStop, appCreatedTime, user, flow).getRowKey();
    s.setStopRow(stopRow);
    Connection conn = ConnectionFactory.createConnection(c1);
    Table table1 = conn.getTable(TableName.valueOf(FlowActivityTable.DEFAULT_TABLE_NAME));
    ResultScanner scanner = table1.getScanner(s);
    int rowCount = 0;
    for (Result result : scanner) {
        assertNotNull(result);
        assertTrue(!result.isEmpty());
        Map<byte[], byte[]> values = result.getFamilyMap(FlowActivityColumnFamily.INFO.getBytes());
        rowCount++;
        byte[] row = result.getRow();
        FlowActivityRowKey flowActivityRowKey = FlowActivityRowKey.parseRowKey(row);
        assertNotNull(flowActivityRowKey);
        assertEquals(cluster, flowActivityRowKey.getClusterId());
        assertEquals(user, flowActivityRowKey.getUserId());
        assertEquals(flow, flowActivityRowKey.getFlowName());
        Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(appCreatedTime);
        assertEquals(dayTs, flowActivityRowKey.getDayTimestamp());
        assertEquals(1, values.size());
        checkFlowActivityRunId(runid, flowVersion, values);
    }
    assertEquals(1, rowCount);
}
Also used: Table (org.apache.hadoop.hbase.client.Table), ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), Connection (org.apache.hadoop.hbase.client.Connection), Scan (org.apache.hadoop.hbase.client.Scan), Result (org.apache.hadoop.hbase.client.Result)
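
The snippet above opens the Connection, Table, and ResultScanner and relies on the test harness to clean up. A minimal hedged sketch of the same scan wrapped in try-with-resources; conf, startRow, and stopRow are assumed to be built as in the method above:

// Hedged sketch: the scan pattern above with explicit resource cleanup.
// conf (a Configuration), startRow, and stopRow are assumed from the surrounding test.
Scan scan = new Scan();
scan.addFamily(FlowActivityColumnFamily.INFO.getBytes());
scan.setStartRow(startRow);
scan.setStopRow(stopRow);
try (Connection conn = ConnectionFactory.createConnection(conf);
     Table table = conn.getTable(TableName.valueOf(FlowActivityTable.DEFAULT_TABLE_NAME));
     ResultScanner scanner = table.getScanner(scan)) {
    for (Result result : scanner) {
        // each Result is one row; getRow() returns the encoded flow activity row key
        FlowActivityRowKey rowKey = FlowActivityRowKey.parseRowKey(result.getRow());
    }
}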

Example 12 with Connection

Use of org.apache.hadoop.hbase.client.Connection in project hadoop by apache.

From the class TestHBaseStorageFlowRun, method checkMinMaxFlush:

private void checkMinMaxFlush(Configuration c1, long minTS, long startTs, int count, String cluster, String user, String flow, long runid, boolean checkMax) throws IOException {
    Connection conn = ConnectionFactory.createConnection(c1);
    // check in flow run table
    Table table1 = conn.getTable(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
    // scan the table and see that we get back the right min and max
    // timestamps
    byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey();
    Get g = new Get(startRow);
    g.addColumn(FlowRunColumnFamily.INFO.getBytes(), FlowRunColumn.MIN_START_TIME.getColumnQualifierBytes());
    g.addColumn(FlowRunColumnFamily.INFO.getBytes(), FlowRunColumn.MAX_END_TIME.getColumnQualifierBytes());
    Result r1 = table1.get(g);
    assertNotNull(r1);
    assertTrue(!r1.isEmpty());
    Map<byte[], byte[]> values = r1.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
    int start = 10;
    assertEquals(2, r1.size());
    long starttime = Bytes.toLong(values.get(FlowRunColumn.MIN_START_TIME.getColumnQualifierBytes()));
    assertEquals(minTS, starttime);
    if (checkMax) {
        assertEquals(startTs + 2 * (count - start) + TestFlowDataGenerator.END_TS_INCR, Bytes.toLong(values.get(FlowRunColumn.MAX_END_TIME.getColumnQualifierBytes())));
    }
}
Also used: EntityTable (org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable), Table (org.apache.hadoop.hbase.client.Table), Get (org.apache.hadoop.hbase.client.Get), Connection (org.apache.hadoop.hbase.client.Connection), Result (org.apache.hadoop.hbase.client.Result)
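
When only one or two qualifiers are needed, Result.getValue(family, qualifier) reads a single cell without materializing the whole family map. A short hedged sketch using the same Get against the flow run table; conn and startRow are assumed from the method above:

// Hedged sketch: point reads of the min/max timestamps via Result.getValue
// instead of Result.getFamilyMap; conn and startRow are assumed from above.
Get get = new Get(startRow);
get.addColumn(FlowRunColumnFamily.INFO.getBytes(), FlowRunColumn.MIN_START_TIME.getColumnQualifierBytes());
get.addColumn(FlowRunColumnFamily.INFO.getBytes(), FlowRunColumn.MAX_END_TIME.getColumnQualifierBytes());
try (Table table = conn.getTable(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME))) {
    Result result = table.get(get);
    byte[] minBytes = result.getValue(FlowRunColumnFamily.INFO.getBytes(),
        FlowRunColumn.MIN_START_TIME.getColumnQualifierBytes());
    long minStartTime = (minBytes != null) ? Bytes.toLong(minBytes) : -1L;
}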

Example 13 with Connection

Use of org.apache.hadoop.hbase.client.Connection in project hadoop by apache.

From the class TestHBaseStorageFlowRun, method checkFlowRunTableBatchLimit:

/*
   * checks the batch limits on a scan
   */
void checkFlowRunTableBatchLimit(String cluster, String user, String flow, long runid, Configuration c1) throws IOException {
    Scan s = new Scan();
    s.addFamily(FlowRunColumnFamily.INFO.getBytes());
    byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey();
    s.setStartRow(startRow);
    // set a batch limit
    int batchLimit = 2;
    s.setBatch(batchLimit);
    String clusterStop = cluster + "1";
    byte[] stopRow = new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey();
    s.setStopRow(stopRow);
    Connection conn = ConnectionFactory.createConnection(c1);
    Table table1 = conn.getTable(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
    ResultScanner scanner = table1.getScanner(s);
    int loopCount = 0;
    for (Result result : scanner) {
        assertNotNull(result);
        assertTrue(!result.isEmpty());
        assertTrue(result.rawCells().length <= batchLimit);
        Map<byte[], byte[]> values = result.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
        assertNotNull(values);
        assertTrue(values.size() <= batchLimit);
        loopCount++;
    }
    assertTrue(loopCount > 0);
    // test with a diff batch limit
    s = new Scan();
    s.addFamily(FlowRunColumnFamily.INFO.getBytes());
    s.setStartRow(startRow);
    // set a batch limit
    batchLimit = 1;
    s.setBatch(batchLimit);
    s.setMaxResultsPerColumnFamily(2);
    s.setStopRow(stopRow);
    scanner = table1.getScanner(s);
    loopCount = 0;
    for (Result result : scanner) {
        assertNotNull(result);
        assertTrue(!result.isEmpty());
        assertEquals(batchLimit, result.rawCells().length);
        Map<byte[], byte[]> values = result.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
        assertNotNull(values);
        assertEquals(batchLimit, values.size());
        loopCount++;
    }
    assertTrue(loopCount > 0);
    // test with a diff batch limit
    // set it high enough
    // we expect back 3 since there are
    // column = m!HDFS_BYTES_READ value=57
    // column = m!MAP_SLOT_MILLIS value=141
    // column min_start_time value=1425016501000
    s = new Scan();
    s.addFamily(FlowRunColumnFamily.INFO.getBytes());
    s.setStartRow(startRow);
    // set a batch limit
    batchLimit = 100;
    s.setBatch(batchLimit);
    s.setStopRow(stopRow);
    scanner = table1.getScanner(s);
    loopCount = 0;
    for (Result result : scanner) {
        assertNotNull(result);
        assertTrue(!result.isEmpty());
        assertTrue(result.rawCells().length <= batchLimit);
        Map<byte[], byte[]> values = result.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
        assertNotNull(values);
        // assert that with every next invocation
        // we get back <= batchLimit values
        assertTrue(values.size() <= batchLimit);
        // see comment above
        assertTrue(values.size() == 3);
        loopCount++;
    }
    // should loop through only once
    assertTrue(loopCount == 1);
    // set it to a negative number
    // we expect all 3 back since there are
    // column = m!HDFS_BYTES_READ value=57
    // column = m!MAP_SLOT_MILLIS value=141
    // column min_start_time value=1425016501000
    s = new Scan();
    s.addFamily(FlowRunColumnFamily.INFO.getBytes());
    s.setStartRow(startRow);
    // set a batch limit
    batchLimit = -671;
    s.setBatch(batchLimit);
    s.setStopRow(stopRow);
    scanner = table1.getScanner(s);
    loopCount = 0;
    for (Result result : scanner) {
        assertNotNull(result);
        assertTrue(!result.isEmpty());
        assertEquals(3, result.rawCells().length);
        Map<byte[], byte[]> values = result.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
        assertNotNull(values);
        // assert that with every next invocation
        // we get back <= batchLimit values
        assertEquals(3, values.size());
        loopCount++;
    }
    // should loop through only once
    assertEquals(1, loopCount);
    // set it to 0
    // we expect all 3 back since there are
    // column = m!HDFS_BYTES_READ value=57
    // column = m!MAP_SLOT_MILLIS value=141
    // column min_start_time value=1425016501000
    s = new Scan();
    s.addFamily(FlowRunColumnFamily.INFO.getBytes());
    s.setStartRow(startRow);
    // set a batch limit
    batchLimit = 0;
    s.setBatch(batchLimit);
    s.setStopRow(stopRow);
    scanner = table1.getScanner(s);
    loopCount = 0;
    for (Result result : scanner) {
        assertNotNull(result);
        assertTrue(!result.isEmpty());
        assertEquals(3, result.rawCells().length);
        Map<byte[], byte[]> values = result.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
        assertNotNull(values);
        // assert that with every next invocation
        // we get back <= batchLimit values
        assertEquals(3, values.size());
        loopCount++;
    }
    // should loop through only once
    assertEquals(1, loopCount);
}
Also used: EntityTable (org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable), Table (org.apache.hadoop.hbase.client.Table), ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), Connection (org.apache.hadoop.hbase.client.Connection), Scan (org.apache.hadoop.hbase.client.Scan), Result (org.apache.hadoop.hbase.client.Result)
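
The test above exercises Scan.setBatch: a positive batch caps the number of cells carried in each Result, so a row with more cells than the batch size is split across successive Results, while a batch of zero or a negative value leaves every row intact. A condensed hedged sketch of that behavior; conn, startRow, and stopRow are assumed from the method above:

// Hedged sketch of Scan batching; conn, startRow, and stopRow are assumed from above.
Scan batched = new Scan();
batched.addFamily(FlowRunColumnFamily.INFO.getBytes());
batched.setStartRow(startRow);
batched.setStopRow(stopRow);
// with a batch of 2, a row holding 3 cells comes back as two Results (2 cells, then 1)
batched.setBatch(2);
try (Table table = conn.getTable(TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
     ResultScanner scanner = table.getScanner(batched)) {
    for (Result result : scanner) {
        System.out.println("cells in this Result: " + result.rawCells().length);
    }
}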

Example 14 with Connection

Use of org.apache.hadoop.hbase.client.Connection in project hadoop by apache.

From the class TestHBaseTimelineStorageApps, method testWriteApplicationToHBase:

@Test
public void testWriteApplicationToHBase() throws Exception {
    TimelineEntities te = new TimelineEntities();
    ApplicationEntity entity = new ApplicationEntity();
    String appId = "application_1000178881110_2002";
    entity.setId(appId);
    Long cTime = 1425016501000L;
    entity.setCreatedTime(cTime);
    // add the info map in Timeline Entity
    Map<String, Object> infoMap = new HashMap<String, Object>();
    infoMap.put("infoMapKey1", "infoMapValue1");
    infoMap.put("infoMapKey2", 10);
    entity.addInfo(infoMap);
    // add the isRelatedToEntity info
    String key = "task";
    String value = "is_related_to_entity_id_here";
    Set<String> isRelatedToSet = new HashSet<String>();
    isRelatedToSet.add(value);
    Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
    isRelatedTo.put(key, isRelatedToSet);
    entity.setIsRelatedToEntities(isRelatedTo);
    // add the relatesTo info
    key = "container";
    value = "relates_to_entity_id_here";
    Set<String> relatesToSet = new HashSet<String>();
    relatesToSet.add(value);
    value = "relates_to_entity_id_here_Second";
    relatesToSet.add(value);
    Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
    relatesTo.put(key, relatesToSet);
    entity.setRelatesToEntities(relatesTo);
    // add some config entries
    Map<String, String> conf = new HashMap<String, String>();
    conf.put("config_param1", "value1");
    conf.put("config_param2", "value2");
    entity.addConfigs(conf);
    // add metrics
    Set<TimelineMetric> metrics = new HashSet<>();
    TimelineMetric m1 = new TimelineMetric();
    m1.setId("MAP_SLOT_MILLIS");
    Map<Long, Number> metricValues = new HashMap<Long, Number>();
    long ts = System.currentTimeMillis();
    metricValues.put(ts - 120000, 100000000);
    metricValues.put(ts - 100000, 200000000);
    metricValues.put(ts - 80000, 300000000);
    metricValues.put(ts - 60000, 400000000);
    metricValues.put(ts - 40000, 50000000000L);
    metricValues.put(ts - 20000, 60000000000L);
    m1.setType(Type.TIME_SERIES);
    m1.setValues(metricValues);
    metrics.add(m1);
    entity.addMetrics(metrics);
    // add aggregated metrics
    TimelineEntity aggEntity = new TimelineEntity();
    String type = TimelineEntityType.YARN_APPLICATION.toString();
    aggEntity.setId(appId);
    aggEntity.setType(type);
    long cTime2 = 1425016502000L;
    aggEntity.setCreatedTime(cTime2);
    TimelineMetric aggMetric = new TimelineMetric();
    aggMetric.setId("MEM_USAGE");
    Map<Long, Number> aggMetricValues = new HashMap<Long, Number>();
    long aggTs = ts;
    aggMetricValues.put(aggTs - 120000, 102400000L);
    aggMetric.setType(Type.SINGLE_VALUE);
    aggMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
    aggMetric.setValues(aggMetricValues);
    Set<TimelineMetric> aggMetrics = new HashSet<>();
    aggMetrics.add(aggMetric);
    entity.addMetrics(aggMetrics);
    te.addEntity(entity);
    HBaseTimelineWriterImpl hbi = null;
    try {
        Configuration c1 = util.getConfiguration();
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        hbi.start();
        String cluster = "cluster_test_write_app";
        String user = "user1";
        String flow = "s!ome_f\tlow  _n am!e";
        String flowVersion = "AB7822C10F1111";
        long runid = 1002345678919L;
        hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
        // Write entity again, this time without created time.
        entity = new ApplicationEntity();
        appId = "application_1000178881110_2002";
        entity.setId(appId);
        // add the info map in Timeline Entity
        Map<String, Object> infoMap1 = new HashMap<>();
        infoMap1.put("infoMapKey3", "infoMapValue1");
        entity.addInfo(infoMap1);
        te = new TimelineEntities();
        te.addEntity(entity);
        hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
        hbi.stop();
        infoMap.putAll(infoMap1);
        // retrieve the row
        ApplicationRowKey applicationRowKey = new ApplicationRowKey(cluster, user, flow, runid, appId);
        byte[] rowKey = applicationRowKey.getRowKey();
        Get get = new Get(rowKey);
        get.setMaxVersions(Integer.MAX_VALUE);
        Connection conn = ConnectionFactory.createConnection(c1);
        Result result = new ApplicationTable().getResult(c1, conn, get);
        assertTrue(result != null);
        assertEquals(17, result.size());
        // check the row key
        byte[] row1 = result.getRow();
        assertTrue(isApplicationRowKeyCorrect(row1, cluster, user, flow, runid, appId));
        // check info column family
        String id1 = ApplicationColumn.ID.readResult(result).toString();
        assertEquals(appId, id1);
        Long cTime1 = (Long) ApplicationColumn.CREATED_TIME.readResult(result);
        assertEquals(cTime, cTime1);
        Map<String, Object> infoColumns = ApplicationColumnPrefix.INFO.readResults(result, new StringKeyConverter());
        assertEquals(infoMap, infoColumns);
        // Remember isRelatedTo is of type Map<String, Set<String>>
        for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo.entrySet()) {
            Object isRelatedToValue = ApplicationColumnPrefix.IS_RELATED_TO.readResult(result, isRelatedToEntry.getKey());
            String compoundValue = isRelatedToValue.toString();
            // id7?id9?id6
            Set<String> isRelatedToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
            assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(), isRelatedToValues.size());
            for (String v : isRelatedToEntry.getValue()) {
                assertTrue(isRelatedToValues.contains(v));
            }
        }
        // RelatesTo
        for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo.entrySet()) {
            String compoundValue = ApplicationColumnPrefix.RELATES_TO.readResult(result, relatesToEntry.getKey()).toString();
            // id3?id4?id5
            Set<String> relatesToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
            assertEquals(relatesTo.get(relatesToEntry.getKey()).size(), relatesToValues.size());
            for (String v : relatesToEntry.getValue()) {
                assertTrue(relatesToValues.contains(v));
            }
        }
        KeyConverter<String> stringKeyConverter = new StringKeyConverter();
        // Configuration
        Map<String, Object> configColumns = ApplicationColumnPrefix.CONFIG.readResults(result, stringKeyConverter);
        assertEquals(conf, configColumns);
        NavigableMap<String, NavigableMap<Long, Number>> metricsResult = ApplicationColumnPrefix.METRIC.readResultsWithTimestamps(result, stringKeyConverter);
        NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
        matchMetrics(metricValues, metricMap);
        // read the timeline entity using the reader this time. In metrics limit
        // specify Integer MAX_VALUE. A TIME_SERIES will be returned(if more than
        // one value exists for a metric).
        TimelineEntity e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), Integer.MAX_VALUE));
        assertNotNull(e1);
        // verify attributes
        assertEquals(appId, e1.getId());
        assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
        assertEquals(cTime, e1.getCreatedTime());
        Map<String, Object> infoMap2 = e1.getInfo();
        assertEquals(infoMap, infoMap2);
        Map<String, Set<String>> isRelatedTo2 = e1.getIsRelatedToEntities();
        assertEquals(isRelatedTo, isRelatedTo2);
        Map<String, Set<String>> relatesTo2 = e1.getRelatesToEntities();
        assertEquals(relatesTo, relatesTo2);
        Map<String, String> conf2 = e1.getConfigs();
        assertEquals(conf, conf2);
        Set<TimelineMetric> metrics2 = e1.getMetrics();
        assertEquals(2, metrics2.size());
        for (TimelineMetric metric2 : metrics2) {
            Map<Long, Number> metricValues2 = metric2.getValues();
            assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS") || metric2.getId().equals("MEM_USAGE"));
            if (metric2.getId().equals("MAP_SLOT_MILLIS")) {
                assertEquals(6, metricValues2.size());
                matchMetrics(metricValues, metricValues2);
            }
            if (metric2.getId().equals("MEM_USAGE")) {
                assertEquals(1, metricValues2.size());
                matchMetrics(aggMetricValues, metricValues2);
            }
        }
        // In metrics limit specify a value of 3. No more than 3 values for a
        // metric will be returned.
        e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), 3));
        assertNotNull(e1);
        assertEquals(appId, e1.getId());
        assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
        assertEquals(conf, e1.getConfigs());
        metrics2 = e1.getMetrics();
        assertEquals(2, metrics2.size());
        for (TimelineMetric metric2 : metrics2) {
            Map<Long, Number> metricValues2 = metric2.getValues();
            assertTrue(metricValues2.size() <= 3);
            assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS") || metric2.getId().equals("MEM_USAGE"));
        }
        // Check if single value(latest value) instead of time series is returned
        // if metricslimit is not set(null), irrespective of number of metric
        // values.
        e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), null));
        assertNotNull(e1);
        assertEquals(appId, e1.getId());
        assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
        assertEquals(cTime, e1.getCreatedTime());
        assertEquals(infoMap, e1.getInfo());
        assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
        assertEquals(relatesTo, e1.getRelatesToEntities());
        assertEquals(conf, e1.getConfigs());
        assertEquals(2, e1.getMetrics().size());
        for (TimelineMetric metric : e1.getMetrics()) {
            assertEquals(1, metric.getValues().size());
            assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
            assertTrue(metric.getId().equals("MAP_SLOT_MILLIS") || metric.getId().equals("MEM_USAGE"));
            assertEquals(1, metric.getValues().size());
            if (metric.getId().equals("MAP_SLOT_MILLIS")) {
                assertTrue(metric.getValues().containsKey(ts - 20000));
                assertEquals(metricValues.get(ts - 20000), metric.getValues().get(ts - 20000));
            }
            if (metric.getId().equals("MEM_USAGE")) {
                assertTrue(metric.getValues().containsKey(aggTs - 120000));
                assertEquals(aggMetricValues.get(aggTs - 120000), metric.getValues().get(aggTs - 120000));
            }
        }
    } finally {
        if (hbi != null) {
            hbi.stop();
            hbi.close();
        }
    }
}
Also used: StringKeyConverter (org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter), TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric), EnumSet (java.util.EnumSet), Set (java.util.Set), NavigableSet (java.util.NavigableSet), HashSet (java.util.HashSet), Configuration (org.apache.hadoop.conf.Configuration), NavigableMap (java.util.NavigableMap), HashMap (java.util.HashMap), Map (java.util.Map), TimelineReaderContext (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext), Result (org.apache.hadoop.hbase.client.Result), ApplicationTable (org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable), TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), Connection (org.apache.hadoop.hbase.client.Connection), TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve), ApplicationEntity (org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity), ApplicationRowKey (org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey), Get (org.apache.hadoop.hbase.client.Get), Test (org.junit.Test)
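
The core of the test is a write-then-read round trip: entities are written through HBaseTimelineWriterImpl and read back either with a raw Get against the application table or through the timeline reader. A stripped-down hedged sketch of the raw read side; applicationRowKey and c1 are assumed to be built exactly as in the test above:

// Hedged sketch: read back one application row with all stored cell versions.
// applicationRowKey and c1 (a Configuration) are assumed from the test above.
Get get = new Get(applicationRowKey.getRowKey());
// keep every version of each cell rather than only the latest one
get.setMaxVersions(Integer.MAX_VALUE);
try (Connection conn = ConnectionFactory.createConnection(c1)) {
    Result result = new ApplicationTable().getResult(c1, conn, get);
    // typed column accessors decode individual cells from the raw Result
    String storedAppId = ApplicationColumn.ID.readResult(result).toString();
    Long createdTime = (Long) ApplicationColumn.CREATED_TIME.readResult(result);
}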

Example 15 with Connection

Use of org.apache.hadoop.hbase.client.Connection in project hadoop by apache.

From the class TestHBaseTimelineStorageApps, method testWriteNullApplicationToHBase:

@Test
public void testWriteNullApplicationToHBase() throws Exception {
    TimelineEntities te = new TimelineEntities();
    ApplicationEntity entity = new ApplicationEntity();
    String appId = "application_1000178881110_2002";
    entity.setId(appId);
    long cTime = 1425016501000L;
    entity.setCreatedTime(cTime);
    // add the info map in Timeline Entity
    Map<String, Object> infoMap = new HashMap<String, Object>();
    infoMap.put("in fo M apK  ey1", "infoMapValue1");
    infoMap.put("infoMapKey2", 10);
    entity.addInfo(infoMap);
    te.addEntity(entity);
    HBaseTimelineWriterImpl hbi = null;
    try {
        Configuration c1 = util.getConfiguration();
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        hbi.start();
        String cluster = "cluster_check_null_application";
        String user = "user1check_null_application";
        //set the flow name to null
        String flow = null;
        String flowVersion = "AB7822C10F1111";
        long runid = 1002345678919L;
        hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
        hbi.stop();
        // retrieve the row
        Scan scan = new Scan();
        scan.setStartRow(Bytes.toBytes(cluster));
        scan.setStopRow(Bytes.toBytes(cluster + "1"));
        Connection conn = ConnectionFactory.createConnection(c1);
        ResultScanner resultScanner = new ApplicationTable().getResultScanner(c1, conn, scan);
        assertTrue(resultScanner != null);
        // try to iterate over results
        int count = 0;
        for (Result rr = resultScanner.next(); rr != null; rr = resultScanner.next()) {
            count++;
        }
        // there should be no rows written
        // no exceptions thrown during write
        assertEquals(0, count);
    } finally {
        if (hbi != null) {
            hbi.stop();
            hbi.close();
        }
    }
}
Also used: ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), Configuration (org.apache.hadoop.conf.Configuration), HashMap (java.util.HashMap), Connection (org.apache.hadoop.hbase.client.Connection), Result (org.apache.hadoop.hbase.client.Result), ApplicationTable (org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable), TimelineEntities (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), ApplicationEntity (org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity), Scan (org.apache.hadoop.hbase.client.Scan), Test (org.junit.Test)
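
A short hedged sketch of the same row-count check with explicit cleanup; cluster and c1 are assumed to be set up as in the test above:

// Hedged sketch: count rows in the cluster's key range, closing resources when done.
// cluster and c1 (a Configuration) are assumed from the test above.
Scan scan = new Scan();
scan.setStartRow(Bytes.toBytes(cluster));
scan.setStopRow(Bytes.toBytes(cluster + "1"));
int count = 0;
try (Connection conn = ConnectionFactory.createConnection(c1);
     ResultScanner rs = new ApplicationTable().getResultScanner(c1, conn, scan)) {
    for (Result r : rs) {
        count++;
    }
}
// the test expects count == 0: the entity was written with a null flow name, so no row is stored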

Aggregations

Connection (org.apache.hadoop.hbase.client.Connection): 297
Table (org.apache.hadoop.hbase.client.Table): 191
Test (org.junit.Test): 171
IOException (java.io.IOException): 113
TableName (org.apache.hadoop.hbase.TableName): 103
Result (org.apache.hadoop.hbase.client.Result): 101
Admin (org.apache.hadoop.hbase.client.Admin): 86
Scan (org.apache.hadoop.hbase.client.Scan): 79
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 75
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 71
Put (org.apache.hadoop.hbase.client.Put): 68
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 57
Delete (org.apache.hadoop.hbase.client.Delete): 55
Configuration (org.apache.hadoop.conf.Configuration): 53
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 51
Get (org.apache.hadoop.hbase.client.Get): 48
InterruptedIOException (java.io.InterruptedIOException): 45
Cell (org.apache.hadoop.hbase.Cell): 41
CellScanner (org.apache.hadoop.hbase.CellScanner): 34
ArrayList (java.util.ArrayList): 25