Use of org.apache.hadoop.hive.ql.parse.repl.metric.event.Stage in project hive by apache.
From the class TestReplicationMetricCollector, the method testSuccessBootstrapDumpMetrics:
@Test
public void testSuccessBootstrapDumpMetrics() throws Exception {
  ReplicationMetricCollector bootstrapDumpMetricCollector = new BootstrapDumpMetricCollector("db", "dummyDir", conf);
  Map<String, Long> metricMap = new HashMap<>();
  metricMap.put(ReplUtils.MetricName.TABLES.name(), (long) 10);
  metricMap.put(ReplUtils.MetricName.FUNCTIONS.name(), (long) 1);
  // Start the "dump" stage with total counts: 10 tables and 1 function.
  bootstrapDumpMetricCollector.reportStageStart("dump", metricMap);
  bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 1);
  List<ReplicationMetric> actualMetrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, actualMetrics.size());
  bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.TABLES.name(), 2);
  bootstrapDumpMetricCollector.reportStageProgress("dump", ReplUtils.MetricName.FUNCTIONS.name(), 1);
  actualMetrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, actualMetrics.size());
  bootstrapDumpMetricCollector.reportStageEnd("dump", Status.SUCCESS, 10, new SnapshotUtils.ReplSnapshotCount(), new ReplStatsTracker(0));
  bootstrapDumpMetricCollector.reportEnd(Status.SUCCESS);
  actualMetrics = MetricCollector.getInstance().getMetrics();
  Assert.assertEquals(1, actualMetrics.size());
  // Build the expected metric: TABLES progressed 1 + 2 = 3 of 10, FUNCTIONS 1 of 1.
  Metadata expectedMetadata = new Metadata("db", Metadata.ReplicationType.BOOTSTRAP, "dummyDir");
  expectedMetadata.setLastReplId(10);
  Progress expectedProgress = new Progress();
  expectedProgress.setStatus(Status.SUCCESS);
  Stage dumpStage = new Stage("dump", Status.SUCCESS, 0);
  dumpStage.setEndTime(0);
  Metric expectedTableMetric = new Metric(ReplUtils.MetricName.TABLES.name(), 10);
  expectedTableMetric.setCurrentCount(3);
  Metric expectedFuncMetric = new Metric(ReplUtils.MetricName.FUNCTIONS.name(), 1);
  expectedFuncMetric.setCurrentCount(1);
  dumpStage.addMetric(expectedTableMetric);
  dumpStage.addMetric(expectedFuncMetric);
  expectedProgress.addStage(dumpStage);
  ReplicationMetric expectedMetric = new ReplicationMetric(1, "repl", 0, expectedMetadata);
  expectedMetric.setProgress(expectedProgress);
  checkSuccess(actualMetrics.get(0), expectedMetric, "dump",
      Arrays.asList(ReplUtils.MetricName.TABLES.name(), ReplUtils.MetricName.FUNCTIONS.name()));
}
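For reference, the assertions above walk the nested structure ReplicationMetric → Progress → Stage → Metric. Below is a minimal sketch of that traversal using only the getters that appear in the snippets on this page; the import paths for Metric and ReplicationMetric are assumed to sit alongside Stage in the same event package.

import java.util.List;

import org.apache.hadoop.hive.ql.parse.repl.metric.event.Metric;
import org.apache.hadoop.hive.ql.parse.repl.metric.event.ReplicationMetric;
import org.apache.hadoop.hive.ql.parse.repl.metric.event.Stage;

public class StageInspectionSketch {
  // Print each stage of a collected ReplicationMetric together with its metric counters.
  // After the three reportStageProgress calls above, the "dump" stage would show
  // TABLES at 3/10 and FUNCTIONS at 1/1.
  static void printStages(ReplicationMetric metric) {
    List<Stage> stages = metric.getProgress().getStages();
    for (Stage stage : stages) {
      System.out.println("stage=" + stage.getName() + " status=" + stage.getStatus());
      for (Metric m : stage.getMetrics()) {
        System.out.println("  " + m.getName() + ": " + m.getCurrentCount() + "/" + m.getTotalCount());
      }
    }
  }
}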
From the class TestScheduledReplicationScenarios, the method checkMetrics:
private void checkMetrics(List<ReplicationMetric> expectedReplicationMetrics, List<ReplicationMetric> actualMetrics) {
  Assert.assertEquals(expectedReplicationMetrics.size(), actualMetrics.size());
  int metricCounter = 0;
  for (ReplicationMetric actualMetric : actualMetrics) {
    // Match expected and actual metrics by replication policy name.
    for (ReplicationMetric expecMetric : expectedReplicationMetrics) {
      if (actualMetric.getPolicy().equalsIgnoreCase(expecMetric.getPolicy())) {
        Assert.assertEquals(expecMetric.getDumpExecutionId(), actualMetric.getDumpExecutionId());
        Assert.assertEquals(expecMetric.getMetadata().getDbName(), actualMetric.getMetadata().getDbName());
        Assert.assertEquals(expecMetric.getMetadata().getLastReplId(), actualMetric.getMetadata().getLastReplId());
        Assert.assertEquals(expecMetric.getMetadata().getStagingDir(), actualMetric.getMetadata().getStagingDir());
        Assert.assertEquals(expecMetric.getMetadata().getReplicationType(), actualMetric.getMetadata().getReplicationType());
        Assert.assertEquals(expecMetric.getProgress().getStatus(), actualMetric.getProgress().getStatus());
        Assert.assertEquals(expecMetric.getProgress().getStages().size(), actualMetric.getProgress().getStages().size());
        List<Stage> expectedStages = expecMetric.getProgress().getStages();
        List<Stage> actualStages = actualMetric.getProgress().getStages();
        int counter = 0;
        for (Stage actualStage : actualStages) {
          // Match stages by name, then compare each metric's counts by name.
          for (Stage expeStage : expectedStages) {
            if (actualStage.getName().equalsIgnoreCase(expeStage.getName())) {
              Assert.assertEquals(expeStage.getStatus(), actualStage.getStatus());
              Assert.assertEquals(expeStage.getMetrics().size(), actualStage.getMetrics().size());
              for (Metric actMetric : actualStage.getMetrics()) {
                for (Metric expMetric : expeStage.getMetrics()) {
                  if (actMetric.getName().equalsIgnoreCase(expMetric.getName())) {
                    Assert.assertEquals(expMetric.getTotalCount(), actMetric.getTotalCount());
                    Assert.assertEquals(expMetric.getCurrentCount(), actMetric.getCurrentCount());
                  }
                }
              }
              counter++;
              if (counter == actualStages.size()) {
                break;
              }
            }
          }
        }
        metricCounter++;
        if (metricCounter == actualMetrics.size()) {
          break;
        }
      }
    }
  }
}
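The innermost comparison above could be factored into its own helper inside the same test class. A hedged sketch (assertStageEquals is a hypothetical name; the getters are exactly the ones used in checkMetrics):

// Hypothetical helper: compare one expected Stage against the actual Stage with the same name.
private void assertStageEquals(Stage expected, Stage actual) {
  Assert.assertEquals(expected.getStatus(), actual.getStatus());
  Assert.assertEquals(expected.getMetrics().size(), actual.getMetrics().size());
  for (Metric actMetric : actual.getMetrics()) {
    for (Metric expMetric : expected.getMetrics()) {
      if (actMetric.getName().equalsIgnoreCase(expMetric.getName())) {
        Assert.assertEquals(expMetric.getTotalCount(), actMetric.getTotalCount());
        Assert.assertEquals(expMetric.getCurrentCount(), actMetric.getCurrentCount());
      }
    }
  }
}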
From the class TestScheduledReplicationScenarios, the method generateDumpStages:
private List<Stage> generateDumpStages(boolean isBootstrap) {
  List<Stage> stages = new ArrayList<>();
  // Ranger
  Stage rangerDump = new Stage("RANGER_DUMP", Status.SUCCESS, 0);
  Metric rangerMetric = new Metric(ReplUtils.MetricName.POLICIES.name(), 0);
  rangerDump.addMetric(rangerMetric);
  stages.add(rangerDump);
  // Atlas
  Stage atlasDump = new Stage("ATLAS_DUMP", Status.SUCCESS, 0);
  Metric atlasMetric = new Metric(ReplUtils.MetricName.ENTITIES.name(), 0);
  atlasDump.addMetric(atlasMetric);
  stages.add(atlasDump);
  // Hive
  Stage replDump = new Stage("REPL_DUMP", Status.SUCCESS, 0);
  if (isBootstrap) {
    Metric hiveMetric = new Metric(ReplUtils.MetricName.TABLES.name(), 1);
    hiveMetric.setCurrentCount(1);
    replDump.addMetric(hiveMetric);
    hiveMetric = new Metric(ReplUtils.MetricName.FUNCTIONS.name(), 0);
    replDump.addMetric(hiveMetric);
  } else {
    Metric hiveMetric = new Metric(ReplUtils.MetricName.EVENTS.name(), 1);
    hiveMetric.setCurrentCount(1);
    replDump.addMetric(hiveMetric);
  }
  stages.add(replDump);
  return stages;
}
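A hedged sketch of how the two helpers might be combined inside the same test class: wrap the generated dump stages in an expected ReplicationMetric and compare it against what the collector recorded. The policy name, execution ids, database name, and staging directory below are placeholders, the constructor signatures mirror the testSuccessBootstrapDumpMetrics snippet above, and Collections is java.util.Collections.

// Hypothetical glue code: build the expected bootstrap-dump metric from the generated stages.
private void verifyBootstrapDumpMetrics(List<ReplicationMetric> actualMetrics) {
  Metadata metadata = new Metadata("srcDb", Metadata.ReplicationType.BOOTSTRAP, "dummyStagingDir");
  Progress progress = new Progress();
  progress.setStatus(Status.SUCCESS);
  for (Stage stage : generateDumpStages(true)) {
    progress.addStage(stage);
  }
  ReplicationMetric expected = new ReplicationMetric(1, "repl_policy", 0, metadata);
  expected.setProgress(progress);
  checkMetrics(Collections.singletonList(expected), actualMetrics);
}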