Search in sources:

Example 1 with ReplicationMetricList

Use of org.apache.hadoop.hive.metastore.api.ReplicationMetricList in the project hive by apache.

The class TestReplicationMetricSink, method testSuccessBootstrapDumpMetrics.

/**
 * End-to-end check of the replication-metrics sink for a bootstrap dump, an
 * incremental dump, and a failover-ready dump: each phase reports stage
 * start/progress/end through a collector, waits for the sink to flush to the
 * metastore, then compares the persisted metric against an expected one.
 */
@Test
public void testSuccessBootstrapDumpMetrics() throws Exception {
    final String dbName = "testAcidTablesReplLoadBootstrapIncr_1592205875387";
    final String dumpRoot = "hdfs://localhost:65158/tmp/org_apache_hadoop_hive_ql_parse_TestReplicationScenarios_245261428230295" + "/hrepl0/dGVzdGFjaWR0YWJsZXNyZXBsbG9hZGJvb3RzdHJhcGluY3JfMTU5MjIwNTg3NTM4Nw==/0/hive";
    // --- Bootstrap dump ---
    ReplicationMetricCollector bootstrapDumpMetricCollector = new BootstrapDumpMetricCollector(dbName, dumpRoot, conf);
    Map<String, Long> metricMap = new HashMap<>();
    metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10);
    metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 1);
    bootstrapDumpMetricCollector.reportStageStart("dump", metricMap);
    bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 1);
    bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 2);
    bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.FUNCTIONS.name(), 1);
    bootstrapDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10, new SnapshotUtils.ReplSnapshotCount(), new ReplStatsTracker(0));
    bootstrapDumpMetricCollector.reportEnd(Status.SUCCESS);
    Metadata expectedMetadata = new Metadata(dbName, Metadata.ReplicationType.BOOTSTRAP, "dummyDir");
    expectedMetadata.setLastReplId(10);
    Progress expectedProgress = new Progress();
    expectedProgress.setStatus(Status.SUCCESS);
    Stage dumpStage = new Stage("dump", Status.SUCCESS, 0);
    dumpStage.setEndTime(0);
    Metric expectedTableMetric = new Metric(ReplUtils.MetricName.TABLES.name(), 10);
    // 1 + 2 = 3 tables reported across the two progress calls above.
    expectedTableMetric.setCurrentCount(3);
    Metric expectedFuncMetric = new Metric(ReplUtils.MetricName.FUNCTIONS.name(), 1);
    expectedFuncMetric.setCurrentCount(1);
    dumpStage.addMetric(expectedTableMetric);
    dumpStage.addMetric(expectedFuncMetric);
    expectedProgress.addStage(dumpStage);
    ReplicationMetric expectedMetric = new ReplicationMetric(1, "repl", 0, expectedMetadata);
    expectedMetric.setProgress(expectedProgress);
    // Give the asynchronous metrics sink time to flush to the metastore.
    Thread.sleep(1000 * 20);
    GetReplicationMetricsRequest metricsRequest = new GetReplicationMetricsRequest();
    metricsRequest.setPolicy("repl");
    ReplicationMetricList actualReplicationMetrics = Hive.get(conf).getMSC().getReplicationMetrics(metricsRequest);
    ObjectMapper mapper = new ObjectMapper();
    ReplicationMetric actualMetric = toReplicationMetric(actualReplicationMetrics.getReplicationMetricList().get(0), mapper);
    checkSuccess(actualMetric, expectedMetric, "dump", Arrays.asList(ReplUtils.MetricName.TABLES.name(), ReplUtils.MetricName.FUNCTIONS.name()));
    // --- Incremental dump ---
    conf.set(Constants.SCHEDULED_QUERY_EXECUTIONID, "2");
    ReplicationMetricCollector incrementDumpMetricCollector = new IncrementalDumpMetricCollector(dbName, dumpRoot, conf);
    metricMap = new HashMap<>();
    metricMap.put(ReplUtils.MetricName.EVENTS.name(), (long) 10);
    incrementDumpMetricCollector.reportStageStart("dump", metricMap);
    incrementDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.EVENTS.name(), 10);
    incrementDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10, new SnapshotUtils.ReplSnapshotCount(), new ReplStatsTracker(0));
    incrementDumpMetricCollector.reportEnd(Status.SUCCESS);
    expectedMetadata = new Metadata(dbName, Metadata.ReplicationType.INCREMENTAL, "dummyDir");
    expectedMetadata.setLastReplId(10);
    expectedProgress = new Progress();
    expectedProgress.setStatus(Status.SUCCESS);
    dumpStage = new Stage("dump", Status.SUCCESS, 0);
    dumpStage.setEndTime(0);
    Metric expectedEventsMetric = new Metric(ReplUtils.MetricName.EVENTS.name(), 10);
    expectedEventsMetric.setCurrentCount(10);
    dumpStage.addMetric(expectedEventsMetric);
    expectedProgress.addStage(dumpStage);
    expectedMetric = new ReplicationMetric(2, "repl", 0, expectedMetadata);
    expectedMetric.setProgress(expectedProgress);
    Thread.sleep(1000 * 20);
    metricsRequest = new GetReplicationMetricsRequest();
    metricsRequest.setPolicy("repl");
    actualReplicationMetrics = Hive.get(conf).getMSC().getReplicationMetrics(metricsRequest);
    Assert.assertEquals(2, actualReplicationMetrics.getReplicationMetricListSize());
    actualMetric = toReplicationMetric(actualReplicationMetrics.getReplicationMetricList().get(0), mapper);
    checkSuccessIncremental(actualMetric, expectedMetric, "dump", Arrays.asList(ReplUtils.MetricName.EVENTS.name()));
    // --- Failover-ready dump (metrics sink during failover) ---
    Mockito.when(fmd.getFailoverEventId()).thenReturn(100L);
    Mockito.when(fmd.getFilePath()).thenReturn(dumpRoot + "/");
    conf.set(Constants.SCHEDULED_QUERY_EXECUTIONID, "3");
    // The failover collector is given the staging dir with a trailing slash.
    String stagingDir = dumpRoot + "/";
    ReplicationMetricCollector failoverDumpMetricCollector = new IncrementalDumpMetricCollector(dbName, stagingDir, conf);
    // Plain map instead of the original double-brace initializer (avoids an anonymous inner class).
    metricMap = new HashMap<>();
    metricMap.put(ReplUtils.MetricName.EVENTS.name(), (long) 10);
    failoverDumpMetricCollector.reportFailoverStart("dump", metricMap, fmd);
    failoverDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.EVENTS.name(), 10);
    failoverDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10, new SnapshotUtils.ReplSnapshotCount(), new ReplStatsTracker(0));
    failoverDumpMetricCollector.reportEnd(Status.FAILOVER_READY);
    expectedMetadata = new Metadata(dbName, Metadata.ReplicationType.INCREMENTAL, "dummyDir");
    expectedMetadata.setLastReplId(10);
    expectedMetadata.setFailoverEventId(100);
    expectedMetadata.setFailoverMetadataLoc(stagingDir + FailoverMetaData.FAILOVER_METADATA);
    expectedProgress = new Progress();
    expectedProgress.setStatus(Status.FAILOVER_READY);
    dumpStage = new Stage("dump", Status.SUCCESS, 0);
    dumpStage.setEndTime(0);
    expectedEventsMetric = new Metric(ReplUtils.MetricName.EVENTS.name(), 10);
    expectedEventsMetric.setCurrentCount(10);
    dumpStage.addMetric(expectedEventsMetric);
    expectedProgress.addStage(dumpStage);
    expectedMetric = new ReplicationMetric(3, "repl", 0, expectedMetadata);
    expectedMetric.setProgress(expectedProgress);
    Thread.sleep(1000 * 20);
    metricsRequest = new GetReplicationMetricsRequest();
    metricsRequest.setPolicy("repl");
    actualReplicationMetrics = Hive.get(conf).getMSC().getReplicationMetrics(metricsRequest);
    Assert.assertEquals(3, actualReplicationMetrics.getReplicationMetricListSize());
    actualMetric = toReplicationMetric(actualReplicationMetrics.getReplicationMetricList().get(0), mapper);
    checkSuccessIncremental(actualMetric, expectedMetric, "dump", Arrays.asList(ReplUtils.MetricName.EVENTS.name()));
}

/**
 * Converts the Thrift {@link ReplicationMetrics} row fetched from the metastore
 * into the in-memory {@link ReplicationMetric} event used for comparison,
 * de-serializing the JSON metadata and progress payloads with {@code mapper}.
 *
 * @param thriftMetric persisted metric row from the metastore
 * @param mapper JSON mapper used for the metadata/progress payloads
 * @return the equivalent in-memory metric
 * @throws Exception on JSON or payload de-serialization failure
 */
private ReplicationMetric toReplicationMetric(ReplicationMetrics thriftMetric, ObjectMapper mapper) throws Exception {
    ReplicationMetric metric = new ReplicationMetric(thriftMetric.getScheduledExecutionId(), thriftMetric.getPolicy(), thriftMetric.getDumpExecutionId(), mapper.readValue(thriftMetric.getMetadata(), Metadata.class));
    metric.setMessageFormat(thriftMetric.getMessageFormat());
    ProgressMapper progressMapper = mapper.readValue(deSerialize(thriftMetric.getProgress()), ProgressMapper.class);
    metric.setProgress(toProgress(progressMapper));
    return metric;
}

/**
 * Rebuilds a {@link Progress} (status, stages, per-stage metrics and
 * start/end times) from its de-serialized {@link ProgressMapper} form.
 */
private Progress toProgress(ProgressMapper progressMapper) {
    Progress progress = new Progress();
    progress.setStatus(progressMapper.getStatus());
    for (StageMapper stageMapper : progressMapper.getStages()) {
        Stage stage = new Stage();
        stage.setName(stageMapper.getName());
        stage.setStatus(stageMapper.getStatus());
        stage.setStartTime(stageMapper.getStartTime());
        stage.setEndTime(stageMapper.getEndTime());
        for (Metric metric : stageMapper.getMetrics()) {
            stage.addMetric(metric);
        }
        progress.addStage(stage);
    }
    return progress;
}
Also used : BootstrapDumpMetricCollector(org.apache.hadoop.hive.ql.parse.repl.dump.metric.BootstrapDumpMetricCollector) ReplStatsTracker(org.apache.hadoop.hive.ql.exec.repl.ReplStatsTracker) Progress(org.apache.hadoop.hive.ql.parse.repl.metric.event.Progress) ReplicationMetricList(org.apache.hadoop.hive.metastore.api.ReplicationMetricList) HashMap(java.util.HashMap) ReplicationMetrics(org.apache.hadoop.hive.metastore.api.ReplicationMetrics) ProgressMapper(org.apache.hadoop.hive.ql.parse.repl.metric.event.ProgressMapper) StageMapper(org.apache.hadoop.hive.ql.parse.repl.metric.event.StageMapper) Metadata(org.apache.hadoop.hive.ql.parse.repl.metric.event.Metadata) IncrementalDumpMetricCollector(org.apache.hadoop.hive.ql.parse.repl.dump.metric.IncrementalDumpMetricCollector) ReplicationMetric(org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric) GetReplicationMetricsRequest(org.apache.hadoop.hive.metastore.api.GetReplicationMetricsRequest) SnapshotUtils(org.apache.hadoop.hive.ql.exec.repl.util.SnapshotUtils) Stage(org.apache.hadoop.hive.ql.parse.repl.metric.event.Stage) Metric(org.apache.hadoop.hive.ql.parse.repl.metric.event.Metric) ReplicationMetric(org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Test(org.junit.Test)

Example 2 with ReplicationMetricList

Use of org.apache.hadoop.hive.metastore.api.ReplicationMetricList in the project hive by apache.

The class TestReplicationMetricSink, method testReplStatsInMetrics.

/**
 * Verifies that an oversized repl-stats string is truncated in the persisted
 * progress with an "ERROR: RM_PROGRESS LIMIT EXCEEDED." marker, and that a
 * requested top-K larger than {@code ReplStatsTracker.TOP_K_MAX} is capped.
 */
@Test
public void testReplStatsInMetrics() throws HiveException, InterruptedException, TException {
    int origRMProgress = ReplStatsTracker.RM_PROGRESS_LENGTH;
    // Shrink the global limit so the 1000-char stats string below is guaranteed to overflow it.
    ReplStatsTracker.RM_PROGRESS_LENGTH = 10;
    try {
        ReplicationMetricCollector incrementDumpMetricCollector = new IncrementalDumpMetricCollector("testAcidTablesReplLoadBootstrapIncr_1592205875387", "hdfs://localhost:65158/tmp/org_apache_hadoop_hive_ql_parse_TestReplicationScenarios_245261428230295" + "/hrepl0/dGVzdGFjaWR0YWJsZXNyZXBsbG9hZGJvb3RzdHJhcGluY3JfMTU5MjIwNTg3NTM4Nw==/0/hive", conf);
        Map<String, Long> metricMap = new HashMap<>();
        ReplStatsTracker replStats = Mockito.mock(ReplStatsTracker.class);
        Mockito.when(replStats.toString()).thenReturn(RandomStringUtils.randomAlphabetic(1000));
        metricMap.put(ReplUtils.MetricName.EVENTS.name(), (long) 10);
        incrementDumpMetricCollector.reportStageStart("dump", metricMap);
        incrementDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.EVENTS.name(), 10);
        incrementDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10, new SnapshotUtils.ReplSnapshotCount(), replStats);
        // Give the asynchronous metrics sink time to flush to the metastore.
        Thread.sleep(1000 * 20);
        GetReplicationMetricsRequest metricsRequest = new GetReplicationMetricsRequest();
        metricsRequest.setPolicy("repl");
        ReplicationMetricList actualReplicationMetrics = Hive.get(conf).getMSC().getReplicationMetrics(metricsRequest);
        String progress = deSerialize(actualReplicationMetrics.getReplicationMetricList().get(0).getProgress());
        assertTrue(progress, progress.contains("ERROR: RM_PROGRESS LIMIT EXCEEDED."));
    } finally {
        // Restore the static limit even if an assertion fails, so later tests are not polluted.
        ReplStatsTracker.RM_PROGRESS_LENGTH = origRMProgress;
    }
    // Testing K_MAX: asking for K=15 must be capped at TOP_K_MAX.
    ReplStatsTracker repl = new ReplStatsTracker(15);
    Assert.assertEquals(ReplStatsTracker.TOP_K_MAX, repl.getK());
}
Also used : ReplStatsTracker(org.apache.hadoop.hive.ql.exec.repl.ReplStatsTracker) GetReplicationMetricsRequest(org.apache.hadoop.hive.metastore.api.GetReplicationMetricsRequest) ReplicationMetricList(org.apache.hadoop.hive.metastore.api.ReplicationMetricList) HashMap(java.util.HashMap) SnapshotUtils(org.apache.hadoop.hive.ql.exec.repl.util.SnapshotUtils) IncrementalDumpMetricCollector(org.apache.hadoop.hive.ql.parse.repl.dump.metric.IncrementalDumpMetricCollector) Test(org.junit.Test)

Example 3 with ReplicationMetricList

Use of org.apache.hadoop.hive.metastore.api.ReplicationMetricList in the project hive by apache.

The class TestReplicationMetrics, method testDeleteMetrics.

/**
 * Checks that deleteReplicationMetrics(maxAgeSecs) removes only metrics older
 * than the cut-off: of three inserted rows, the two aged past two seconds are
 * purged and the freshest one survives intact.
 */
@Test
public void testDeleteMetrics() throws Exception {
    // Work directly against the ObjectStore so insertion timing is under our control.
    ObjectStore objectStore = new ObjectStore();
    objectStore.setConf(metaStore.getConf());
    // Clear out anything left behind by earlier tests.
    objectStore.deleteReplicationMetrics(0);
    ReplicationMetricList batch = new ReplicationMetricList();
    List<ReplicationMetrics> entries = new ArrayList<>();
    entries.add(createReplicationMetric("repl1", 1L));
    entries.add(createReplicationMetric("repl1", 2L));
    batch.setReplicationMetricList(entries);
    objectStore.addReplicationMetrics(batch);
    // Age the first two entries past the two-second retention cut-off used below.
    Thread.sleep(2000);
    entries = new ArrayList<>();
    entries.add(createReplicationMetric("repl1", 3L));
    batch.setReplicationMetricList(entries);
    objectStore.addReplicationMetrics(batch);
    Thread.sleep(500);
    GetReplicationMetricsRequest request = new GetReplicationMetricsRequest();
    request.setPolicy("repl1");
    ReplicationMetricList fetched = client.getReplicationMetrics(request);
    assertEquals(3, fetched.getReplicationMetricListSize());
    // Drop metrics older than two seconds; only the freshest entry should survive.
    objectStore.deleteReplicationMetrics(2);
    request = new GetReplicationMetricsRequest();
    request.setPolicy("repl1");
    fetched = client.getReplicationMetrics(request);
    assertEquals(1, fetched.getReplicationMetricListSize());
    ReplicationMetrics survivor = fetched.getReplicationMetricList().get(0);
    assertEquals("repl1", survivor.getPolicy());
    assertEquals(3L, survivor.getScheduledExecutionId());
    assertEquals(1, survivor.getDumpExecutionId());
    assertEquals("metadata", survivor.getMetadata());
    assertEquals("progress", survivor.getProgress());
}
Also used : GetReplicationMetricsRequest(org.apache.hadoop.hive.metastore.api.GetReplicationMetricsRequest) ObjectStore(org.apache.hadoop.hive.metastore.ObjectStore) ReplicationMetricList(org.apache.hadoop.hive.metastore.api.ReplicationMetricList) ReplicationMetrics(org.apache.hadoop.hive.metastore.api.ReplicationMetrics) ArrayList(java.util.ArrayList) Test(org.junit.Test) MetastoreUnitTest(org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest)

Example 4 with ReplicationMetricList

Use of org.apache.hadoop.hive.metastore.api.ReplicationMetricList in the project hive by apache.

The class TestReplicationMetrics, method testUpdateMetrics.

/**
 * Checks that re-adding a metric with an existing (policy, scheduled execution
 * id) pair updates the stored row in place (new progress payload, same ids),
 * and that fetches return rows in descending scheduled-execution-id order.
 */
@Test
public void testUpdateMetrics() throws Exception {
    ObjectStore objStore = new ObjectStore();
    objStore.setConf(metaStore.getConf());
    // Start from an empty metrics table.
    objStore.deleteReplicationMetrics(0);
    ReplicationMetricList replicationMetricList = new ReplicationMetricList();
    List<ReplicationMetrics> replicationMetrics = new ArrayList<>();
    replicationMetrics.add(createReplicationMetric("repl1", 1L));
    replicationMetrics.add(createReplicationMetric("repl1", 2L));
    replicationMetricList.setReplicationMetricList(replicationMetrics);
    objStore.addReplicationMetrics(replicationMetricList);
    Thread.sleep(1000);
    replicationMetrics = new ArrayList<>();
    replicationMetrics.add(createReplicationMetric("repl2", 3L));
    replicationMetrics.add(createReplicationMetric("repl2", 4L));
    replicationMetricList.setReplicationMetricList(replicationMetrics);
    objStore.addReplicationMetrics(replicationMetricList);
    Thread.sleep(1000);
    // Re-add an existing (repl2, 3) pair with a new progress payload — should update, not insert.
    replicationMetrics = new ArrayList<>();
    replicationMetrics.add(updateReplicationMetric("repl2", 3L, "progress1"));
    replicationMetricList.setReplicationMetricList(replicationMetrics);
    objStore.addReplicationMetrics(replicationMetricList);
    Thread.sleep(1000);
    GetReplicationMetricsRequest getReplicationMetricsRequest = new GetReplicationMetricsRequest();
    getReplicationMetricsRequest.setPolicy("repl1");
    ReplicationMetricList actualList = client.getReplicationMetrics(getReplicationMetricsRequest);
    assertEquals(2, actualList.getReplicationMetricListSize());
    List<ReplicationMetrics> actualMetrics = actualList.getReplicationMetricList();
    // Ordering should be descending by scheduled execution id.
    assertMetricRow(actualMetrics.get(0), "repl1", 2L, "progress");
    assertMetricRow(actualMetrics.get(1), "repl1", 1L, "progress");
    getReplicationMetricsRequest = new GetReplicationMetricsRequest();
    getReplicationMetricsRequest.setPolicy("repl2");
    actualList = client.getReplicationMetrics(getReplicationMetricsRequest);
    assertEquals(2, actualList.getReplicationMetricListSize());
    actualMetrics = actualList.getReplicationMetricList();
    assertMetricRow(actualMetrics.get(0), "repl2", 4L, "progress");
    // The updated row keeps its ids but carries the new progress payload.
    assertMetricRow(actualMetrics.get(1), "repl2", 3L, "progress1");
}

/**
 * Asserts one persisted metric row: its policy, scheduled execution id, the
 * fixed dump execution id (1), the fixed metadata payload ("metadata"), and
 * the expected progress payload.
 */
private static void assertMetricRow(ReplicationMetrics actual, String expectedPolicy, long expectedScheduledExecutionId, String expectedProgress) {
    assertEquals(expectedPolicy, actual.getPolicy());
    assertEquals(expectedScheduledExecutionId, actual.getScheduledExecutionId());
    assertEquals(1, actual.getDumpExecutionId());
    assertEquals("metadata", actual.getMetadata());
    assertEquals(expectedProgress, actual.getProgress());
}
Also used : GetReplicationMetricsRequest(org.apache.hadoop.hive.metastore.api.GetReplicationMetricsRequest) ObjectStore(org.apache.hadoop.hive.metastore.ObjectStore) ReplicationMetricList(org.apache.hadoop.hive.metastore.api.ReplicationMetricList) ReplicationMetrics(org.apache.hadoop.hive.metastore.api.ReplicationMetrics) ArrayList(java.util.ArrayList) Test(org.junit.Test) MetastoreUnitTest(org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest)

Example 5 with ReplicationMetricList

Use of org.apache.hadoop.hive.metastore.api.ReplicationMetricList in the project hive by apache.

The class TestReplicationMetrics, method testAddMetrics.

/**
 * Inserts two metrics for each of two policies, then verifies each policy's
 * fetch returns exactly its own two rows in descending scheduled-execution-id
 * order with the expected fixed payloads.
 */
@Test
public void testAddMetrics() throws Exception {
    ObjectStore store = new ObjectStore();
    store.setConf(metaStore.getConf());
    // Start from an empty metrics table.
    store.deleteReplicationMetrics(0);
    ReplicationMetricList batch = new ReplicationMetricList();
    List<ReplicationMetrics> entries = new ArrayList<>();
    entries.add(createReplicationMetric("repl1", 1L));
    entries.add(createReplicationMetric("repl1", 2L));
    batch.setReplicationMetricList(entries);
    store.addReplicationMetrics(batch);
    Thread.sleep(1000);
    entries = new ArrayList<>();
    entries.add(createReplicationMetric("repl2", 3L));
    entries.add(createReplicationMetric("repl2", 4L));
    batch.setReplicationMetricList(entries);
    store.addReplicationMetrics(batch);
    Thread.sleep(1000);
    // Verify each policy independently; expected ids are listed newest-first
    // because results come back in descending scheduled-execution-id order.
    String[] policies = { "repl1", "repl2" };
    long[][] expectedExecIds = { { 2L, 1L }, { 4L, 3L } };
    for (int i = 0; i < policies.length; i++) {
        GetReplicationMetricsRequest request = new GetReplicationMetricsRequest();
        request.setPolicy(policies[i]);
        ReplicationMetricList fetched = client.getReplicationMetrics(request);
        assertEquals(2, fetched.getReplicationMetricListSize());
        List<ReplicationMetrics> fetchedMetrics = fetched.getReplicationMetricList();
        for (int j = 0; j < 2; j++) {
            ReplicationMetrics metric = fetchedMetrics.get(j);
            assertEquals(policies[i], metric.getPolicy());
            assertEquals(expectedExecIds[i][j], metric.getScheduledExecutionId());
            assertEquals(1, metric.getDumpExecutionId());
            assertEquals("metadata", metric.getMetadata());
            assertEquals("progress", metric.getProgress());
        }
    }
}
Also used : GetReplicationMetricsRequest(org.apache.hadoop.hive.metastore.api.GetReplicationMetricsRequest) ObjectStore(org.apache.hadoop.hive.metastore.ObjectStore) ReplicationMetricList(org.apache.hadoop.hive.metastore.api.ReplicationMetricList) ReplicationMetrics(org.apache.hadoop.hive.metastore.api.ReplicationMetrics) ArrayList(java.util.ArrayList) Test(org.junit.Test) MetastoreUnitTest(org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest)

Aggregations

ReplicationMetricList (org.apache.hadoop.hive.metastore.api.ReplicationMetricList)9 ReplicationMetrics (org.apache.hadoop.hive.metastore.api.ReplicationMetrics)7 ArrayList (java.util.ArrayList)6 GetReplicationMetricsRequest (org.apache.hadoop.hive.metastore.api.GetReplicationMetricsRequest)6 Test (org.junit.Test)6 ObjectStore (org.apache.hadoop.hive.metastore.ObjectStore)4 MetastoreUnitTest (org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest)4 HashMap (java.util.HashMap)2 LinkedList (java.util.LinkedList)2 List (java.util.List)2 ValidReaderWriteIdList (org.apache.hadoop.hive.common.ValidReaderWriteIdList)2 ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList)2 MReplicationMetrics (org.apache.hadoop.hive.metastore.model.MReplicationMetrics)2 MStringList (org.apache.hadoop.hive.metastore.model.MStringList)2 ReplStatsTracker (org.apache.hadoop.hive.ql.exec.repl.ReplStatsTracker)2 SnapshotUtils (org.apache.hadoop.hive.ql.exec.repl.util.SnapshotUtils)2 IncrementalDumpMetricCollector (org.apache.hadoop.hive.ql.parse.repl.dump.metric.IncrementalDumpMetricCollector)2 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)1 BootstrapDumpMetricCollector (org.apache.hadoop.hive.ql.parse.repl.dump.metric.BootstrapDumpMetricCollector)1 Metadata (org.apache.hadoop.hive.ql.parse.repl.metric.event.Metadata)1