Search in sources :

Example 6 with TableHeader

use of org.apache.hudi.cli.TableHeader in project hudi by apache.

From the class TestFileSystemViewCommand, method testShowCommits.

/**
 * Test case for 'show fsview all'.
 *
 * <p>Runs the CLI command and rebuilds the expected table from the same
 * {@code fsView}, then compares the normalized renderings.
 */
@Test
public void testShowCommits() {
    // Run the CLI command under test.
    CommandResult cr = shell().executeCommand("show fsview all");
    assertTrue(cr.isSuccess());
    // Build the expected rows from the same file-system view the command reads.
    Stream<HoodieFileGroup> fileGroups = fsView.getAllFileGroups(partitionPath);
    List<Comparable[]> rows = new ArrayList<>();
    fileGroups.forEach(fg -> fg.getAllFileSlices().forEach(fs -> {
        int idx = 0;
        // For base file only Views, do not display any delta-file related columns
        Comparable[] row = new Comparable[8];
        row[idx++] = fg.getPartitionPath();
        row[idx++] = fg.getFileGroupId().getFileId();
        row[idx++] = fs.getBaseInstantTime();
        row[idx++] = fs.getBaseFile().isPresent() ? fs.getBaseFile().get().getPath() : "";
        row[idx++] = fs.getBaseFile().isPresent() ? fs.getBaseFile().get().getFileSize() : -1;
        // Materialize the log files once instead of re-creating the stream
        // three times (count, size sum, and list rendering all use it).
        List<HoodieLogFile> logFiles = fs.getLogFiles().collect(Collectors.toList());
        // Cast keeps the column a Long, matching Stream.count() in the command output.
        row[idx++] = (long) logFiles.size();
        row[idx++] = logFiles.stream().mapToLong(HoodieLogFile::getFileSize).sum();
        row[idx++] = logFiles.toString();
        rows.add(row);
    }));
    // Size columns are rendered human-readable by the command; mirror that here.
    Function<Object, String> converterFunction = entry -> NumericUtils.humanReadableByteCount((Double.parseDouble(entry.toString())));
    Map<String, Function<Object, String>> fieldNameToConverterMap = new HashMap<>();
    fieldNameToConverterMap.put(HoodieTableHeaderFields.HEADER_TOTAL_DELTA_FILE_SIZE, converterFunction);
    fieldNameToConverterMap.put(HoodieTableHeaderFields.HEADER_DATA_FILE_SIZE, converterFunction);
    TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION).addTableHeaderField(HoodieTableHeaderFields.HEADER_FILE_ID).addTableHeaderField(HoodieTableHeaderFields.HEADER_BASE_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_DATA_FILE).addTableHeaderField(HoodieTableHeaderFields.HEADER_DATA_FILE_SIZE).addTableHeaderField(HoodieTableHeaderFields.HEADER_NUM_DELTA_FILES).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_DELTA_FILE_SIZE).addTableHeaderField(HoodieTableHeaderFields.HEADER_DELTA_FILES);
    String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows);
    // Strip whitespace/punctuation so the comparison ignores table formatting.
    expected = removeNonWordAndStripSpace(expected);
    String got = removeNonWordAndStripSpace(cr.getResult().toString());
    assertEquals(expected, got);
}
Also used : BeforeEach(org.junit.jupiter.api.BeforeEach) FileSlice(org.apache.hudi.common.model.FileSlice) HoodieTableHeaderFields(org.apache.hudi.cli.HoodieTableHeaderFields) HashMap(java.util.HashMap) Function(java.util.function.Function) ArrayList(java.util.ArrayList) HoodieFileGroup(org.apache.hudi.common.model.HoodieFileGroup) HoodieTableMetaClient(org.apache.hudi.common.table.HoodieTableMetaClient) Map(java.util.Map) HoodieLogFile(org.apache.hudi.common.model.HoodieLogFile) Tag(org.junit.jupiter.api.Tag) Assertions.assertEquals(org.junit.jupiter.api.Assertions.assertEquals) CLIFunctionalTestHarness(org.apache.hudi.cli.functional.CLIFunctionalTestHarness) SyncableFileSystemView(org.apache.hudi.common.table.view.SyncableFileSystemView) Files(java.nio.file.Files) TableHeader(org.apache.hudi.cli.TableHeader) HoodieTestCommitMetadataGenerator(org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator) IOException(java.io.IOException) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) HoodieTableFileSystemView(org.apache.hudi.common.table.view.HoodieTableFileSystemView) HoodieCLI(org.apache.hudi.cli.HoodieCLI) Test(org.junit.jupiter.api.Test) List(java.util.List) Stream(java.util.stream.Stream) Paths(java.nio.file.Paths) Assertions.assertTrue(org.junit.jupiter.api.Assertions.assertTrue) CommandResult(org.springframework.shell.core.CommandResult) HoodiePrintHelper(org.apache.hudi.cli.HoodiePrintHelper) FSUtils(org.apache.hudi.common.fs.FSUtils) NumericUtils(org.apache.hudi.common.util.NumericUtils) TableHeader(org.apache.hudi.cli.TableHeader) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) HoodieFileGroup(org.apache.hudi.common.model.HoodieFileGroup) CommandResult(org.springframework.shell.core.CommandResult) Function(java.util.function.Function) Test(org.junit.jupiter.api.Test)

Example 7 with TableHeader

use of org.apache.hudi.cli.TableHeader in project hudi by apache.

From the class TestHoodieLogFileCommand, method testShowLogFileCommits.

/**
 * Test case for 'show logfile metadata'.
 *
 * <p>Runs the command against every log file under the partition and checks
 * its output against a hand-built single-row table.
 */
@Test
public void testShowLogFileCommits() throws JsonProcessingException {
    // Execute the CLI command against all log files in the partition.
    CommandResult commandResult = shell().executeCommand("show logfile metadata --logFilePathPattern " + partitionPath + "/*");
    assertTrue(commandResult.isSuccess());
    // Expected header: instant time, record count, block type, header/footer metadata.
    TableHeader expectedHeader = new TableHeader()
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT_TIME)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_RECORD_COUNT)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_BLOCK_TYPE)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HEADER_METADATA)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_FOOTER_METADATA);
    // Expected content is a single row describing the one data block.
    ObjectMapper mapper = new ObjectMapper();
    String blockHeaderJson = mapper.writeValueAsString(dataBlock.getLogBlockHeader());
    String blockFooterJson = mapper.writeValueAsString(dataBlock.getLogBlockFooter());
    List<Comparable[]> expectedRows = new ArrayList<>();
    expectedRows.add(new Comparable[] { INSTANT_TIME, 100, dataBlock.getBlockType(), blockHeaderJson, blockFooterJson });
    // Normalize both renderings so table formatting differences are ignored.
    String expected = removeNonWordAndStripSpace(
        HoodiePrintHelper.print(expectedHeader, new HashMap<>(), "", false, -1, false, expectedRows));
    String actual = removeNonWordAndStripSpace(commandResult.getResult().toString());
    assertEquals(expected, actual);
}
Also used : TableHeader(org.apache.hudi.cli.TableHeader) ArrayList(java.util.ArrayList) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) CommandResult(org.springframework.shell.core.CommandResult) Test(org.junit.jupiter.api.Test)

Example 8 with TableHeader

use of org.apache.hudi.cli.TableHeader in project hudi by apache.

From the class TestRollbacksCommand, method testShowRollback.

/**
 * Test case for command 'show rollback'.
 *
 * <p>Picks the first completed rollback instant, rebuilds the expected
 * per-file rows from its metadata, and compares with the command output.
 */
@Test
public void testShowRollback() throws IOException {
    // Locate the first completed rollback instant on the timeline.
    HoodieActiveTimeline activeTimeline = new RollbacksCommand.RollbackTimeline(HoodieCLI.getTableMetaClient());
    HoodieInstant instant = activeTimeline.getRollbackTimeline()
        .filterCompletedInstants().getInstants().findFirst().orElse(null);
    assertNotNull(instant, "The instant can not be null.");
    CommandResult commandResult = shell().executeCommand("show rollback --instant " + instant.getTimestamp());
    assertTrue(commandResult.isSuccess());
    // Deserialize the rollback metadata for the chosen instant.
    HoodieRollbackMetadata metadata = TimelineMetadataUtils.deserializeAvroMetadata(
        activeTimeline.getInstantDetails(instant).get(), HoodieRollbackMetadata.class);
    // Expected rows: per partition, successful deletes first, then failed deletes.
    List<Comparable[]> expectedRows = new ArrayList<>();
    metadata.getPartitionMetadata().forEach((partition, partitionMetadata) ->
        Stream.concat(
            partitionMetadata.getSuccessDeleteFiles().stream().map(f -> Pair.of(f, true)),
            partitionMetadata.getFailedDeleteFiles().stream().map(f -> Pair.of(f, false)))
        .forEach(fileAndStatus -> expectedRows.add(new Comparable[] {
            metadata.getStartRollbackTime(),
            metadata.getCommitsRollback().toString(),
            partition,
            fileAndStatus.getLeft(),
            fileAndStatus.getRight()
        })));
    TableHeader header = new TableHeader()
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_ROLLBACK_INSTANT)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_DELETED_FILE)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_SUCCEEDED);
    // Normalize both renderings so table formatting differences are ignored.
    String expected = removeNonWordAndStripSpace(
        HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, expectedRows));
    String actual = removeNonWordAndStripSpace(commandResult.getResult().toString());
    assertEquals(expected, actual);
}
Also used : HoodieInstant(org.apache.hudi.common.table.timeline.HoodieInstant) BeforeEach(org.junit.jupiter.api.BeforeEach) Assertions.assertNotNull(org.junit.jupiter.api.Assertions.assertNotNull) HoodieInstant(org.apache.hudi.common.table.timeline.HoodieInstant) HoodieTableHeaderFields(org.apache.hudi.cli.HoodieTableHeaderFields) HashMap(java.util.HashMap) DEFAULT_FIRST_PARTITION_PATH(org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH) ArrayList(java.util.ArrayList) HoodieTableType(org.apache.hudi.common.model.HoodieTableType) HoodieTableMetaClient(org.apache.hudi.common.table.HoodieTableMetaClient) DEFAULT_PARTITION_PATHS(org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS) Map(java.util.Map) HoodieRollbackMetadata(org.apache.hudi.avro.model.HoodieRollbackMetadata) Tag(org.junit.jupiter.api.Tag) Assertions.assertEquals(org.junit.jupiter.api.Assertions.assertEquals) CLIFunctionalTestHarness(org.apache.hudi.cli.functional.CLIFunctionalTestHarness) HoodieActiveTimeline(org.apache.hudi.common.table.timeline.HoodieActiveTimeline) TimelineLayoutVersion(org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion) BaseHoodieWriteClient(org.apache.hudi.client.BaseHoodieWriteClient) HoodieWriteConfig(org.apache.hudi.config.HoodieWriteConfig) DEFAULT_SECOND_PARTITION_PATH(org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH) TableHeader(org.apache.hudi.cli.TableHeader) HoodieTestTable(org.apache.hudi.common.testutils.HoodieTestTable) TimelineMetadataUtils(org.apache.hudi.common.table.timeline.TimelineMetadataUtils) IOException(java.io.IOException) HoodieIndex(org.apache.hudi.index.HoodieIndex) HoodieCLI(org.apache.hudi.cli.HoodieCLI) Test(org.junit.jupiter.api.Test) List(java.util.List) SparkRDDWriteClient(org.apache.hudi.client.SparkRDDWriteClient) Stream(java.util.stream.Stream) Assertions.assertTrue(org.junit.jupiter.api.Assertions.assertTrue) 
HoodieIndexConfig(org.apache.hudi.config.HoodieIndexConfig) CommandResult(org.springframework.shell.core.CommandResult) HoodiePrintHelper(org.apache.hudi.cli.HoodiePrintHelper) Pair(org.apache.hudi.common.util.collection.Pair) DEFAULT_THIRD_PARTITION_PATH(org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH) HoodieRollbackMetadata(org.apache.hudi.avro.model.HoodieRollbackMetadata) TableHeader(org.apache.hudi.cli.TableHeader) HoodieActiveTimeline(org.apache.hudi.common.table.timeline.HoodieActiveTimeline) ArrayList(java.util.ArrayList) CommandResult(org.springframework.shell.core.CommandResult) Test(org.junit.jupiter.api.Test)

Example 9 with TableHeader

use of org.apache.hudi.cli.TableHeader in project hudi by apache.

From the class TestRollbacksCommand, method testShowRollbacks.

/**
 * Test case for command 'show rollbacks'.
 *
 * <p>Rebuilds the expected summary rows (one per rolled-back commit) from
 * the rollback timeline metadata and compares with the command output.
 */
@Test
public void testShowRollbacks() {
    CommandResult cr = shell().executeCommand("show rollbacks");
    assertTrue(cr.isSuccess());
    // get rollback instants
    HoodieActiveTimeline activeTimeline = new RollbacksCommand.RollbackTimeline(HoodieCLI.getTableMetaClient());
    Stream<HoodieInstant> rollback = activeTimeline.getRollbackTimeline().filterCompletedInstants().getInstants();
    List<Comparable[]> rows = new ArrayList<>();
    rollback.sorted().forEach(instant -> {
        try {
            // get pair of rollback time and instant time
            HoodieRollbackMetadata metadata = TimelineMetadataUtils.deserializeAvroMetadata(activeTimeline.getInstantDetails(instant).get(), HoodieRollbackMetadata.class);
            metadata.getCommitsRollback().forEach(c -> {
                Comparable[] row = new Comparable[5];
                row[0] = metadata.getStartRollbackTime();
                row[1] = c;
                // expect data: 3 files deleted / 3 partitions touched per rollback
                // NOTE(review): hard-coded counts come from the test fixture — confirm against setup
                row[2] = 3;
                row[3] = metadata.getTimeTakenInMillis();
                row[4] = 3;
                rows.add(row);
            });
        } catch (IOException e) {
            // Fail loudly instead of swallowing: a missing expected row would
            // otherwise surface only as an opaque assertEquals mismatch.
            throw new RuntimeException("Failed to read rollback metadata for instant " + instant, e);
        }
    });
    TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_ROLLBACK_INSTANT).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_FILES_DELETED).addTableHeaderField(HoodieTableHeaderFields.HEADER_TIME_TOKEN_MILLIS).addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_PARTITIONS);
    String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows);
    // Strip whitespace/punctuation so the comparison ignores table formatting.
    expected = removeNonWordAndStripSpace(expected);
    String got = removeNonWordAndStripSpace(cr.getResult().toString());
    assertEquals(expected, got);
}
Also used : HoodieInstant(org.apache.hudi.common.table.timeline.HoodieInstant) HoodieRollbackMetadata(org.apache.hudi.avro.model.HoodieRollbackMetadata) TableHeader(org.apache.hudi.cli.TableHeader) HoodieActiveTimeline(org.apache.hudi.common.table.timeline.HoodieActiveTimeline) ArrayList(java.util.ArrayList) IOException(java.io.IOException) CommandResult(org.springframework.shell.core.CommandResult) Test(org.junit.jupiter.api.Test)

Example 10 with TableHeader

use of org.apache.hudi.cli.TableHeader in project hudi by apache.

From the class TestStatsCommand, method testFileSizeStats.

/**
 * Test case for command 'stats filesizes'.
 *
 * <p>Seeds two commits with known base-file sizes, then mirrors the
 * command's histogram computation to build the expected table.
 */
@Test
public void testFileSizeStats() throws Exception {
    String firstCommit = "100";
    String secondCommit = "101";
    // Base-file sizes to create per commit (LinkedHashMap keeps commit order).
    Map<String, Integer[]> fileSizesPerCommit = new LinkedHashMap<>();
    fileSizesPerCommit.put(firstCommit, new Integer[] { 100, 120, 150 });
    fileSizesPerCommit.put(secondCommit, new Integer[] { 200, 180, 250, 300 });
    // Seed base files across the three default partitions.
    String firstPartition = HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH;
    String secondPartition = HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH;
    String thirdPartition = HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH;
    HoodieTestTable testTable = HoodieTestTable.of(HoodieCLI.getTableMetaClient());
    Integer[] firstSizes = fileSizesPerCommit.get(firstCommit);
    assertTrue(3 <= firstSizes.length);
    testTable.addCommit(firstCommit)
        .withBaseFilesInPartition(firstPartition, firstSizes[0])
        .withBaseFilesInPartition(secondPartition, firstSizes[1])
        .withBaseFilesInPartition(thirdPartition, firstSizes[2]);
    Integer[] secondSizes = fileSizesPerCommit.get(secondCommit);
    assertTrue(4 <= secondSizes.length);
    testTable.addCommit(secondCommit)
        .withBaseFilesInPartition(firstPartition, secondSizes[0])
        .withBaseFilesInPartition(secondPartition, secondSizes[1], secondSizes[2])
        .withBaseFilesInPartition(thirdPartition, secondSizes[3]);
    CommandResult commandResult = shell().executeCommand("stats filesizes");
    assertTrue(commandResult.isSuccess());
    // Mirror the command's computation: one histogram per commit plus a global one.
    Histogram allCommitsHistogram = new Histogram(new UniformReservoir(StatsCommand.MAX_FILES));
    HashMap<String, Histogram> perCommitHistograms = new HashMap<>();
    fileSizesPerCommit.forEach((commit, sizes) -> {
        Histogram commitHistogram = new Histogram(new UniformReservoir(StatsCommand.MAX_FILES));
        for (int size : sizes) {
            commitHistogram.update(size);
            allCommitsHistogram.update(size);
        }
        perCommitHistograms.put(commit, commitHistogram);
    });
    // Expected rows: one per commit and a final aggregate "ALL" row.
    List<Comparable[]> expectedRows = new ArrayList<>();
    for (Map.Entry<String, Histogram> entry : perCommitHistograms.entrySet()) {
        expectedRows.add(new StatsCommand().printFileSizeHistogram(entry.getKey(), entry.getValue().getSnapshot()));
    }
    expectedRows.add(new StatsCommand().printFileSizeHistogram("ALL", allCommitsHistogram.getSnapshot()));
    TableHeader header = new TableHeader()
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_COMMIT_TIME)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HISTOGRAM_MIN)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HISTOGRAM_10TH)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HISTOGRAM_50TH)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HISTOGRAM_AVG)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HISTOGRAM_95TH)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HISTOGRAM_MAX)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HISTOGRAM_NUM_FILES)
        .addTableHeaderField(HoodieTableHeaderFields.HEADER_HISTOGRAM_STD_DEV);
    // Normalize both renderings so table formatting differences are ignored.
    String expected = removeNonWordAndStripSpace(
        HoodiePrintHelper.print(header, new StatsCommand().getFieldNameToConverterMap(), "", false, -1, false, expectedRows));
    String actual = removeNonWordAndStripSpace(commandResult.getResult().toString());
    assertEquals(expected, actual);
}
Also used : Histogram(com.codahale.metrics.Histogram) TableHeader(org.apache.hudi.cli.TableHeader) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) CommandResult(org.springframework.shell.core.CommandResult) Snapshot(com.codahale.metrics.Snapshot) HoodieTestTable(org.apache.hudi.common.testutils.HoodieTestTable) UniformReservoir(com.codahale.metrics.UniformReservoir) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) Test(org.junit.jupiter.api.Test)

Aggregations

ArrayList (java.util.ArrayList)45 TableHeader (org.apache.hudi.cli.TableHeader)45 HashMap (java.util.HashMap)33 CliCommand (org.springframework.shell.core.annotation.CliCommand)22 Map (java.util.Map)19 HoodieInstant (org.apache.hudi.common.table.timeline.HoodieInstant)19 List (java.util.List)18 Test (org.junit.jupiter.api.Test)18 CommandResult (org.springframework.shell.core.CommandResult)18 IOException (java.io.IOException)17 Function (java.util.function.Function)17 HoodieCLI (org.apache.hudi.cli.HoodieCLI)15 HoodiePrintHelper (org.apache.hudi.cli.HoodiePrintHelper)15 HoodieTableMetaClient (org.apache.hudi.common.table.HoodieTableMetaClient)15 HoodieTimeline (org.apache.hudi.common.table.timeline.HoodieTimeline)14 HoodieActiveTimeline (org.apache.hudi.common.table.timeline.HoodieActiveTimeline)12 Collectors (java.util.stream.Collectors)11 Path (org.apache.hadoop.fs.Path)10 HoodieTableHeaderFields (org.apache.hudi.cli.HoodieTableHeaderFields)10 FSUtils (org.apache.hudi.common.fs.FSUtils)9