Use of org.apache.hudi.cli.commands.TableCommand in project hudi by apache.
The class ITTestCommitsCommand, method init:
@BeforeEach
public void init() throws IOException {
  String tableName = "test_table_" + ITTestCommitsCommand.class.getName();
  String tablePath = Paths.get(basePath, tableName).toString();
  HoodieCLI.conf = jsc.hadoopConfiguration();
  // Create the table and connect to it.
  new TableCommand().createTable(
      tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(),
      "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
  metaClient.setBasePath(tablePath);
  metaClient = HoodieTableMetaClient.reload(metaClient);
}
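Once init() has run, the tests in this class can drive the Hudi CLI against the freshly created table. A minimal sketch of such a test, assuming the same getShell() helper and CommandResult assertions used in the import test further below ('commits show' is a standard Hudi CLI command; the test body itself is hypothetical):

@Test
public void testShowCommits() throws IOException {
  // Hypothetical test: list commits on the table created in init().
  // On a brand-new table the command should succeed with an empty timeline.
  CommandResult cr = getShell().executeCommand("commits show");
  assertTrue(cr.isSuccess());
}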
Use of org.apache.hudi.cli.commands.TableCommand in project hudi by apache.
The class ITTestClusteringCommand, method init:
@BeforeEach
public void init() throws IOException {
  tableName = "test_table_" + ITTestClusteringCommand.class.getName();
  tablePath = Paths.get(basePath, tableName).toString();
  HoodieCLI.conf = jsc.hadoopConfiguration();
  // Create the table and connect to it.
  new TableCommand().createTable(
      tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(),
      "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
  metaClient.setBasePath(tablePath);
  metaClient = HoodieTableMetaClient.reload(metaClient);
}
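ITTestClusteringCommand then exercises the clustering CLI against this table. A minimal sketch of a follow-on test, assuming the standard 'clustering schedule' Hudi CLI command and the shell helper shown above (the exact flags are an assumption):

@Test
public void testScheduleClustering() throws IOException {
  // Hypothetical test: schedule a clustering plan on the COPY_ON_WRITE table
  // created in init(). Spark settings mirror those used elsewhere in these tests.
  CommandResult cr = getShell().executeCommand(
      String.format("clustering schedule --sparkMaster %s --sparkMemory %s", "local", "2G"));
  assertTrue(cr.isSuccess());
}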
Use of org.apache.hudi.cli.commands.TableCommand in project hudi by apache.
The class ITTestCompactionCommand, method init:
@BeforeEach
public void init() throws IOException {
  tableName = "test_table_" + ITTestCompactionCommand.class.getName();
  tablePath = Paths.get(basePath, tableName).toString();
  HoodieCLI.conf = jsc.hadoopConfiguration();
  // Create the table and connect to it.
  new TableCommand().createTable(
      tablePath, tableName, HoodieTableType.MERGE_ON_READ.name(),
      "", TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
  metaClient.setBasePath(tablePath);
  metaClient = HoodieTableMetaClient.reload(metaClient);
}
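Note the table type: compaction only applies to MERGE_ON_READ tables, which is why this init() differs from the two COPY_ON_WRITE setups above. A minimal sketch of a follow-on test, assuming the standard 'compaction schedule' Hudi CLI command (the exact flags are an assumption):

@Test
public void testScheduleCompaction() throws IOException {
  // Hypothetical test: schedule a compaction on the MERGE_ON_READ table from init().
  CommandResult cr = getShell().executeCommand(
      String.format("compaction schedule --sparkMaster %s --sparkMemory %s", "local", "2G"));
  assertTrue(cr.isSuccess());
}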
Use of org.apache.hudi.cli.commands.TableCommand in project hudi by apache.
The class ITTestHDFSParquetImportCommand, method testConvertWithInsert:
/**
 * Test case for 'hdfsparquetimport' with insert.
 */
@Test
public void testConvertWithInsert() throws IOException {
  String command = String.format(
      "hdfsparquetimport --srcPath %s --targetPath %s --tableName %s --tableType %s "
          + "--rowKeyField %s --partitionPathField %s --parallelism %s --schemaFilePath %s "
          + "--format %s --sparkMemory %s --retry %s --sparkMaster %s",
      sourcePath.toString(), targetPath.toString(), tableName,
      HoodieTableType.COPY_ON_WRITE.name(), "_row_key", "timestamp", "1",
      schemaFile, "parquet", "2G", "1", "local");
  CommandResult cr = getShell().executeCommand(command);
  assertAll("Command ran successfully",
      () -> assertTrue(cr.isSuccess()),
      () -> assertEquals("Table imported to hoodie format", cr.getResult().toString()));
  // Check that the Hudi table exists.
  String metaPath = targetPath + Path.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME;
  assertTrue(Files.exists(Paths.get(metaPath)), "Hoodie table does not exist.");
  // Load the table's metadata.
  new TableCommand().connect(targetPath.toString(), TimelineLayoutVersion.VERSION_1, false, 2000, 300000, 7);
  metaClient = HoodieCLI.getTableMetaClient();
  assertEquals(1, metaClient.getActiveTimeline().getCommitsTimeline().countInstants(),
      "Should have only 1 commit.");
  verifyResultData(insertData);
}
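After connect(), the table's metadata client is held by HoodieCLI and can be inspected directly. A minimal sketch using the standard HoodieTableMetaClient accessors (the commented values are what this particular test would expect):

HoodieTableMetaClient client = HoodieCLI.getTableMetaClient();
// Table type and name are read from hoodie.properties under the .hoodie metafolder.
System.out.println(client.getTableType());                  // COPY_ON_WRITE
System.out.println(client.getTableConfig().getTableName()); // the imported table's name
// Count completed commit instants, mirroring the assertion above.
long commits = client.getActiveTimeline().getCommitsTimeline()
    .filterCompletedInstants().countInstants();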