Usage of org.apache.hudi.common.testutils.minicluster.HdfsTestService in the Apache Hudi project.
Class FunctionalTestHarness, method runBeforeEach:
@BeforeEach
public synchronized void runBeforeEach() throws Exception {
  // Reuse the Spark session and mini HDFS cluster across tests; only build them
  // the first time through (or after the shutdown hook has nulled them out).
  initialized = spark != null && hdfsTestService != null;
  if (initialized) {
    return;
  }

  // Spark side: session with Hudi's class registrations and read support enabled.
  SparkConf sparkConf = conf();
  SparkRDDWriteClient.registerClasses(sparkConf);
  HoodieReadClient.addHoodieSupport(sparkConf);
  spark = SparkSession.builder().config(sparkConf).getOrCreate();
  sqlContext = spark.sqlContext();
  jsc = new JavaSparkContext(spark.sparkContext());
  context = new HoodieSparkEngineContext(jsc);

  // HDFS side: start the mini cluster and make sure its working directory exists.
  hdfsTestService = new HdfsTestService();
  dfsCluster = hdfsTestService.start(true);
  dfs = dfsCluster.getFileSystem();
  dfs.mkdirs(dfs.getWorkingDirectory());

  // Tear both services down when the JVM exits, nulling the fields so a later
  // call to this method would reinitialize from scratch.
  Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    hdfsTestService.stop();
    hdfsTestService = null;
    jsc.close();
    jsc = null;
    spark.stop();
    spark = null;
  }));
}
Usage of org.apache.hudi.common.testutils.minicluster.HdfsTestService in the Apache Hudi project.
Class HoodieClientTestHarness, method initDFS:
/**
 * Initializes a distributed file system and base directory.
 *
 * Starts a mini HDFS cluster via {@code HdfsTestService} and points the harness's
 * base path at the cluster's working directory, so tests operate against real
 * (in-process) DFS semantics rather than the local file system.
 *
 * @throws IOException if the mini cluster or its file system cannot be set up
 */
protected void initDFS() throws IOException {
  hdfsTestService = new HdfsTestService();
  // format=true: start the mini cluster on a freshly formatted name node.
  dfsCluster = hdfsTestService.start(true);
  // Create a temp folder as the base path
  dfs = dfsCluster.getFileSystem();
  dfsBasePath = dfs.getWorkingDirectory().toString();
  // Mirror the DFS base path into the harness-wide basePath/hadoopConf fields
  // so the rest of the test infrastructure reads and writes against the cluster.
  this.basePath = dfsBasePath;
  this.hadoopConf = dfs.getConf();
  dfs.mkdirs(new Path(dfsBasePath));
}
Usage of org.apache.hudi.common.testutils.minicluster.HdfsTestService in the Apache Hudi project.
Class TestDFSPropertiesConfiguration, method initClass:
@BeforeAll
public static void initClass() throws Exception {
  // Bring up a mini HDFS cluster and use its working directory as the base path
  // for all the property-file fixtures written below.
  hdfsTestService = new HdfsTestService();
  dfsCluster = hdfsTestService.start(true);
  dfs = dfsCluster.getFileSystem();
  dfsBasePath = dfs.getWorkingDirectory().toString();
  dfs.mkdirs(new Path(dfsBasePath));

  // t1.props: leading blank, comment and malformed ("abc") lines that the
  // parser is expected to skip, followed by one property of each supported type.
  writePropertiesFile(new Path(dfsBasePath + "/t1.props"), new String[] {
      "",
      "#comment",
      "abc",
      "int.prop=123", "double.prop=113.4", "string.prop=str", "boolean.prop=true", "long.prop=1354354354"});

  // t2.props: pulls in t1 via an include directive.
  writePropertiesFile(new Path(dfsBasePath + "/t2.props"),
      new String[] {"string.prop=ignored", "include=t1.props"});

  // t3.props: an include sandwiched between property definitions, with
  // duplicate keys before and after it (exercises override ordering).
  writePropertiesFile(new Path(dfsBasePath + "/t3.props"),
      new String[] {"double.prop=838.3", "include = t2.props", "double.prop=243.4", "string.prop=t3.value"});

  // t4.props: includes itself — deliberate fixture for cyclic-include handling.
  writePropertiesFile(new Path(dfsBasePath + "/t4.props"),
      new String[] {"double.prop=838.3", "include = t4.props"});
}
Usage of org.apache.hudi.common.testutils.minicluster.HdfsTestService in the Apache Hudi project.
Class TestInputPathHandler, method setUpDFS:
@BeforeAll
public static void setUpDFS() throws IOException {
  // Need to closeAll to clear FileSystem.Cache, required because DFS and LocalFS used in the
  // same JVM
  FileSystem.closeAll();
  // Lazily start the mini HDFS cluster; a non-null service means a previous
  // suite in this JVM already started it and it can be reused.
  if (hdfsTestService == null) {
    hdfsTestService = new HdfsTestService();
    // format=true: start on a freshly formatted name node.
    dfsCluster = hdfsTestService.start(true);
    // Create a temp folder as the base path
    dfs = dfsCluster.getFileSystem();
  }
  // Reset the path collections unconditionally so each run starts from empty
  // lists even when the cluster itself is reused.
  inputPaths = new ArrayList<>();
  incrementalPaths = new ArrayList<>();
  snapshotPaths = new ArrayList<>();
  nonHoodiePaths = new ArrayList<>();
  initTables();
}
Usage of org.apache.hudi.common.testutils.minicluster.HdfsTestService in the Apache Hudi project.
Class UtilitiesTestBase, method initClass:
// Boots the shared test services: a mini HDFS cluster, a ZooKeeper service, and
// (optionally) a Hive service, all sharing the HDFS Hadoop configuration.
// Note the ordering: the ZK service is constructed before the DFS cluster is
// started, but only started at the end, after HDFS (and Hive, if requested) are up.
public static void initClass(boolean startHiveService) throws Exception {
  hdfsTestService = new HdfsTestService();
  // ZK service shares the HDFS Hadoop configuration.
  zookeeperTestService = new ZookeeperTestService(hdfsTestService.getHadoopConf());
  // format=true: start the mini cluster on a freshly formatted name node.
  dfsCluster = hdfsTestService.start(true);
  dfs = dfsCluster.getFileSystem();
  // Use the DFS working directory as the base path for test data.
  dfsBasePath = dfs.getWorkingDirectory().toString();
  dfs.mkdirs(new Path(dfsBasePath));
  if (startHiveService) {
    hiveTestService = new HiveTestService(hdfsTestService.getHadoopConf());
    hiveServer = hiveTestService.start();
    // Drop any state left behind by a previous suite in this JVM.
    clearHiveDb();
  }
  zookeeperTestService.start();
}
Aggregations