Use of org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric in the Apache Hive project: method testFailureCacheHardLimit of class TestReplicationMetricCollector.
@Test
public void testFailureCacheHardLimit() throws Exception {
  // Reset the singleton so the spied size limit below actually takes effect.
  MetricCollector.getInstance().deinit();
  conf = new HiveConf();
  // Spy the collector and force its cache capacity down to a single entry.
  MetricCollector spiedCollector = Mockito.spy(MetricCollector.getInstance());
  Mockito.doReturn(1L).when(spiedCollector).getMaxSize(Mockito.any());
  spiedCollector.init(conf);
  // The first metric fills the one-slot cache ...
  spiedCollector.addMetric(new ReplicationMetric(1, "repl", 0, null));
  // ... so the second must be rejected with a SemanticException.
  try {
    spiedCollector.addMetric(new ReplicationMetric(2, "repl", 0, null));
    Assert.fail();
  } catch (SemanticException e) {
    Assert.assertEquals("Metrics are not getting collected. ", e.getMessage());
  }
}
Use of org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric in the Apache Hive project: method testSuccessBootstrapLoadMetrics of class TestReplicationMetricCollector.
@Test
public void testSuccessBootstrapLoadMetrics() throws Exception {
  ReplicationMetricCollector collector = new BootstrapLoadMetricCollector("db", "dummyDir", 1, conf);
  // Seed the "dump" stage with the totals it is expected to reach.
  Map<String, Long> totals = new HashMap<>();
  totals.put(ReplUtils.MetricName.TABLES.name(), 10L);
  totals.put(ReplUtils.MetricName.FUNCTIONS.name(), 1L);
  collector.reportStageStart("dump", totals);
  // Report progress in increments; the collector must keep exactly one metric.
  collector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 1);
  List<ReplicationMetric> metrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, metrics.size());
  collector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 2);
  collector.reportStageProgress("dump", ReplUtils.MetricName.FUNCTIONS.name(), 1);
  metrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, metrics.size());
  collector.reportStageEnd("dump", Status.SUCCESS, 10, new SnapshotUtils.ReplSnapshotCount(), new ReplStatsTracker(0));
  collector.reportEnd(Status.SUCCESS);
  metrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, metrics.size());
  // Build the metric we expect the collector to have produced.
  Metadata expectedMetadata = new Metadata("db", Metadata.ReplicationType.BOOTSTRAP, "dummyDir");
  expectedMetadata.setLastReplId(10);
  Stage expectedStage = new Stage("dump", Status.SUCCESS, 0);
  expectedStage.setEndTime(0);
  Metric tableMetric = new Metric(ReplUtils.MetricName.TABLES.name(), 10);
  tableMetric.setCurrentCount(3); // 1 + 2 from the two progress reports above
  Metric funcMetric = new Metric(ReplUtils.MetricName.FUNCTIONS.name(), 1);
  funcMetric.setCurrentCount(1);
  expectedStage.addMetric(tableMetric);
  expectedStage.addMetric(funcMetric);
  Progress expectedProgress = new Progress();
  expectedProgress.setStatus(Status.SUCCESS);
  expectedProgress.addStage(expectedStage);
  ReplicationMetric expected = new ReplicationMetric(1, "repl", 1, expectedMetadata);
  expected.setProgress(expectedProgress);
  checkSuccess(metrics.get(0), expected, "dump",
      Arrays.asList(ReplUtils.MetricName.TABLES.name(), ReplUtils.MetricName.FUNCTIONS.name()));
}
Use of org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric in the Apache Hive project: method testSuccessStageFailedAdmin of class TestReplicationMetricCollector.
@Test
public void testSuccessStageFailedAdmin() throws Exception {
  ReplicationMetricCollector collector = new BootstrapDumpMetricCollector("db", "dummyDir", conf);
  // Start the "dump" stage with its expected totals, then end it as FAILED_ADMIN.
  Map<String, Long> totals = new HashMap<>();
  totals.put(ReplUtils.MetricName.TABLES.name(), 10L);
  totals.put(ReplUtils.MetricName.FUNCTIONS.name(), 1L);
  collector.reportStageStart("dump", totals);
  collector.reportStageEnd("dump", Status.FAILED_ADMIN, "errorlogpath");
  // The collected metric must carry the admin-failure status and the error log path.
  List<ReplicationMetric> metrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, metrics.size());
  ReplicationMetric collected = metrics.get(0);
  Assert.assertEquals(Status.FAILED_ADMIN, collected.getProgress().getStatus());
  Assert.assertEquals("errorlogpath", collected.getProgress().getStageByName("dump").getErrorLogPath());
}
Use of org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric in the Apache Hive project: method testFailoverReadyDumpMetrics of class TestReplicationMetricCollector.
@Test
public void testFailoverReadyDumpMetrics() throws Exception {
  ReplicationMetricCollector collector = new IncrementalDumpMetricCollector("db", "dummyDir", conf);
  // Begin a failover-style "dump" stage and report partial event progress.
  Map<String, Long> totals = new HashMap<>();
  totals.put(ReplUtils.MetricName.EVENTS.name(), 10L);
  collector.reportFailoverStart("dump", totals, fmd);
  collector.reportStageProgress("dump", ReplUtils.MetricName.EVENTS.name(), 2);
  List<ReplicationMetric> metrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, metrics.size());
  // Finish the stage successfully and end the run in FAILOVER_READY state.
  collector.reportStageEnd("dump", Status.SUCCESS, 10, new SnapshotUtils.ReplSnapshotCount(), new ReplStatsTracker(0));
  collector.reportEnd(Status.FAILOVER_READY);
  metrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, metrics.size());
  // Build the metric we expect: incremental metadata carrying failover details.
  Metadata expectedMetadata = new Metadata("db", Metadata.ReplicationType.INCREMENTAL, "dummyDir");
  expectedMetadata.setLastReplId(10);
  expectedMetadata.setFailoverEventId(10);
  expectedMetadata.setFailoverMetadataLoc("dummyDir");
  Stage expectedStage = new Stage("dump", Status.SUCCESS, 0);
  expectedStage.setEndTime(0);
  Metric eventMetric = new Metric(ReplUtils.MetricName.EVENTS.name(), 10);
  eventMetric.setCurrentCount(2);
  expectedStage.addMetric(eventMetric);
  Progress expectedProgress = new Progress();
  expectedProgress.setStatus(Status.FAILOVER_READY);
  expectedProgress.addStage(expectedStage);
  ReplicationMetric expected = new ReplicationMetric(1, "repl", 0, expectedMetadata);
  expected.setProgress(expectedProgress);
  checkSuccess(metrics.get(0), expected, "dump",
      Arrays.asList(ReplUtils.MetricName.EVENTS.name()));
}
Use of org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric in the Apache Hive project: method performNonRecoverableChecks of class TestReplicationMetricUpdateOnFailure.
void performNonRecoverableChecks(String dumpDir, String stageName) throws IOException {
  // A non-recoverable failure must leave exactly one metric in FAILED_ADMIN state.
  List<ReplicationMetric> metrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, metrics.size());
  Progress progress = metrics.get(0).getProgress();
  Assert.assertEquals(Status.FAILED_ADMIN, progress.getStatus());
  Assert.assertEquals(1, progress.getStages().size());
  Assert.assertEquals(Status.FAILED_ADMIN, progress.getStageByName(stageName).getStatus());
  Assert.assertNotEquals(0, progress.getStageByName(stageName).getEndTime());
  // The non-recoverable marker file must have been written under the dump dir.
  Path markerPath = new Path(new Path(dumpDir), ReplAck.NON_RECOVERABLE_MARKER.toString());
  Assert.assertTrue(fs.exists(markerPath));
  // Clean up the marker and the collector so subsequent checks start fresh.
  fs.delete(markerPath, true);
  MetricCollector.getInstance().deinit();
}
Aggregations