Search in sources:

Example 6 with TimelineLayoutVersion

Use of org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion in the Apache Hudi project.

From the class TestHoodieActiveTimeline, method testLoadingInstantsFromFiles.

@Test
public void testLoadingInstantsFromFiles() throws IOException {
    // Drive four commits ("1", "3", "5", "8") through the full lifecycle:
    // requested -> inflight -> completed. Keep the expected completed-state
    // instants alongside so the assertions below can reference them.
    String[] commitTimes = {"1", "3", "5", "8"};
    HoodieInstant[] completedInstants = new HoodieInstant[commitTimes.length];
    timeline = new HoodieActiveTimeline(metaClient);
    for (int i = 0; i < commitTimes.length; i++) {
        HoodieInstant requested = new HoodieInstant(State.REQUESTED, HoodieTimeline.COMMIT_ACTION, commitTimes[i]);
        timeline.createNewInstant(requested);
        timeline.transitionRequestedToInflight(requested, Option.empty());
        timeline.saveAsComplete(new HoodieInstant(true, requested.getAction(), requested.getTimestamp()), Option.empty());
        completedInstants[i] = new HoodieInstant(false, HoodieTimeline.COMMIT_ACTION, commitTimes[i]);
    }
    // A fifth commit at "9" is created but deliberately left inflight.
    HoodieInstant inflightCommit = new HoodieInstant(true, HoodieTimeline.COMMIT_ACTION, "9");
    timeline.createNewInstant(inflightCommit);
    timeline = timeline.reload();
    assertEquals(5, timeline.countInstants(), "Total instants should be 5");
    assertStreamEquals(Stream.of(completedInstants[0], completedInstants[1], completedInstants[2], completedInstants[3], inflightCommit), timeline.getInstants(), "Check the instants stream");
    assertStreamEquals(Stream.of(completedInstants[0], completedInstants[1], completedInstants[2], completedInstants[3], inflightCommit), timeline.getCommitTimeline().getInstants(), "Check the instants stream");
    assertStreamEquals(Stream.of(completedInstants[0], completedInstants[1], completedInstants[2], completedInstants[3]), timeline.getCommitTimeline().filterCompletedInstants().getInstants(), "Check the instants stream");
    assertStreamEquals(Stream.of(inflightCommit), timeline.getCommitTimeline().filterPendingExcludingCompaction().getInstants(), "Check the instants stream");
    // Backwards compatibility testing for reading compaction plans
    metaClient = HoodieTableMetaClient.withPropertyBuilder().fromMetaClient(metaClient).setTimelineLayoutVersion(VERSION_0).initTable(metaClient.getHadoopConf(), metaClient.getBasePath());
    HoodieInstant compactionRequested = new HoodieInstant(State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, "9");
    byte[] payload = new byte[5];
    HoodieActiveTimeline legacyTimeline = new HoodieActiveTimeline(HoodieTableMetaClient.builder()
        .setConf(metaClient.getHadoopConf())
        .setBasePath(metaClient.getBasePath())
        .setLoadActiveTimelineOnLoad(true)
        .setConsistencyGuardConfig(metaClient.getConsistencyGuardConfig())
        .setFileSystemRetryConfig(metaClient.getFileSystemRetryConfig())
        .setLayoutVersion(Option.of(new TimelineLayoutVersion(VERSION_0)))
        .build());
    // The legacy (VERSION_0) timeline writes the plan to both the auxiliary
    // folder and the timeline folder.
    legacyTimeline.saveToCompactionRequested(compactionRequested, Option.of(payload));
    // Now switch back to the latest timeline version.
    timeline = timeline.reload();
    // The auxiliary copy of the plan must exist after the legacy write.
    assertTrue(metaClient.getFs().exists(new Path(metaClient.getMetaAuxiliaryPath(), compactionRequested.getFileName())));
    // The 5-byte plan payload must be readable by the current-version timeline.
    assertEquals(5, timeline.readCompactionPlanAsBytes(compactionRequested).get().length);
    // Delete the auxiliary file to mimic a future release where we stop writing to aux.
    metaClient.getFs().delete(new Path(metaClient.getMetaAuxiliaryPath(), compactionRequested.getFileName()));
    // Confirm the requested instant is no longer present in aux.
    assertFalse(metaClient.getFs().exists(new Path(metaClient.getMetaAuxiliaryPath(), compactionRequested.getFileName())));
    // Reading the compaction plan must still succeed from the timeline folder alone.
    assertEquals(5, timeline.readCompactionPlanAsBytes(compactionRequested).get().length);
}
Also used : Path(org.apache.hadoop.fs.Path) TimelineLayoutVersion(org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion) Test(org.junit.jupiter.api.Test)

Example 7 with TimelineLayoutVersion

Use of org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion in the Apache Hudi project.

From the class SparkMain, method upgradeOrDowngradeTable.

/**
 * Upgrades or downgrades the Hoodie table at {@code basePath} to the requested table version.
 *
 * <p>Any failure is caught, logged, and reported via the return code rather than rethrown.
 *
 * @param jsc instance of {@link JavaSparkContext} to use.
 * @param basePath base path of the dataset.
 * @param toVersion name of the {@link HoodieTableVersion} to upgrade/downgrade to.
 * @return 0 if success, else -1.
 */
protected static int upgradeOrDowngradeTable(JavaSparkContext jsc, String basePath, String toVersion) {
    // Build a write config using the default rollback-using-markers setting.
    boolean rollbackUsingMarkers = Boolean.parseBoolean(HoodieWriteConfig.ROLLBACK_USING_MARKERS_ENABLE.defaultValue());
    HoodieWriteConfig config = getWriteConfig(basePath, rollbackUsingMarkers);
    // The active timeline is not needed up front; the upgrade/downgrade run loads what it needs.
    HoodieTableMetaClient metaClient = HoodieTableMetaClient.builder()
        .setConf(jsc.hadoopConfiguration())
        .setBasePath(config.getBasePath())
        .setLoadActiveTimelineOnLoad(false)
        .setConsistencyGuardConfig(config.getConsistencyGuardConfig())
        .setLayoutVersion(Option.of(new TimelineLayoutVersion(config.getTimelineLayoutVersion())))
        .setFileSystemRetryConfig(config.getFileSystemRetryConfig())
        .build();
    try {
        UpgradeDowngrade upgradeDowngrade = new UpgradeDowngrade(
            metaClient, config, new HoodieSparkEngineContext(jsc), SparkUpgradeDowngradeHelper.getInstance());
        upgradeDowngrade.run(HoodieTableVersion.valueOf(toVersion), null);
        LOG.info(String.format("Table at \"%s\" upgraded / downgraded to version \"%s\".", basePath, toVersion));
        return 0;
    } catch (Exception e) {
        LOG.warn(String.format("Failed: Could not upgrade/downgrade table at \"%s\" to version \"%s\".", basePath, toVersion), e);
        return -1;
    }
}
Also used : HoodieTableMetaClient(org.apache.hudi.common.table.HoodieTableMetaClient) HoodieSparkEngineContext(org.apache.hudi.client.common.HoodieSparkEngineContext) UpgradeDowngrade(org.apache.hudi.table.upgrade.UpgradeDowngrade) HoodieWriteConfig(org.apache.hudi.config.HoodieWriteConfig) TimelineLayoutVersion(org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion) HoodieSavepointException(org.apache.hudi.exception.HoodieSavepointException) IOException(java.io.IOException)

Aggregations

TimelineLayoutVersion (org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion)7 HoodieWriteConfig (org.apache.hudi.config.HoodieWriteConfig)5 HashMap (java.util.HashMap)4 SparkRDDWriteClient (org.apache.hudi.client.SparkRDDWriteClient)4 Path (org.apache.hadoop.fs.Path)3 Test (org.junit.jupiter.api.Test)3 ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)3 ArrayList (java.util.ArrayList)2 List (java.util.List)2 FileSlice (org.apache.hudi.common.model.FileSlice)2 HoodieInstant (org.apache.hudi.common.table.timeline.HoodieInstant)2 HoodieTable (org.apache.hudi.table.HoodieTable)2 WriteMarkers (org.apache.hudi.table.marker.WriteMarkers)2 IOException (java.io.IOException)1 HoodieSparkEngineContext (org.apache.hudi.client.common.HoodieSparkEngineContext)1 HoodieTableConfig (org.apache.hudi.common.table.HoodieTableConfig)1 HoodieTableMetaClient (org.apache.hudi.common.table.HoodieTableMetaClient)1 HoodieTableVersion (org.apache.hudi.common.table.HoodieTableVersion)1 MarkerType (org.apache.hudi.common.table.marker.MarkerType)1 HoodieSavepointException (org.apache.hudi.exception.HoodieSavepointException)1