Example use of io.trino.plugin.deltalake.util.DockerizedDataLake in the project trino by trinodb:
the method createDockerizedDataLake of the class TestDeltaLakeAdlsConnectorSmokeTest.
@Override
DockerizedDataLake createDockerizedDataLake() throws Exception {
    // Render the ABFS core-site.xml template, substituting the storage account name and key.
    String templateResource = "io/trino/plugin/deltalake/hdp3.1-core-site.xml.abfs-template";
    String coreSiteXmlContent = Resources.toString(Resources.getResource(templateResource), UTF_8)
            .replace("%ABFS_ACCESS_KEY%", accessKey)
            .replace("%ABFS_ACCOUNT%", account);

    // World-readable permissions so the Hadoop process inside the container can read the file.
    FileAttribute<Set<PosixFilePermission>> worldReadable =
            PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-r--r--"));
    Path coreSiteXmlFile = Files.createTempFile("core-site", ".xml", worldReadable);
    coreSiteXmlFile.toFile().deleteOnExit();
    Files.write(coreSiteXmlFile, coreSiteXmlContent.getBytes(UTF_8));

    // Mount the rendered config over the container's Hadoop core-site.xml.
    String hostPath = coreSiteXmlFile.normalize().toAbsolutePath().toString();
    return new DockerizedDataLake(
            getHadoopBaseImage(),
            ImmutableMap.of(),
            ImmutableMap.of(hostPath, "/etc/hadoop/conf/core-site.xml"));
}
Example use of io.trino.plugin.deltalake.util.DockerizedDataLake in the project trino by trinodb:
the method createQueryRunner of the class TestDeltaLakeAdlsStorage.
@Override
protected QueryRunner createQueryRunner() throws Exception {
    // Core-site.xml pre-populated with the ABFS account settings for this test run.
    Path abfsCoreSiteXml = createHadoopCoreSiteXmlTempFileWithAbfsSettings();

    // Container mounts: tiny TPC-H Delta tables, plus the rendered Hadoop configuration.
    ImmutableMap<String, String> dataMounts =
            ImmutableMap.of("io/trino/plugin/deltalake/testing/resources/databricks", "/tmp/tpch-tiny");
    ImmutableMap<String, String> configMounts =
            ImmutableMap.of(abfsCoreSiteXml.toString(), "/etc/hadoop/conf/core-site.xml");

    // Registered with closeAfterClass so the container is torn down after the test class finishes.
    dockerizedDataLake = closeAfterClass(
            new DockerizedDataLake(Optional.of(HADOOP_BASE_IMAGE), dataMounts, configMounts));
    testingHadoop = dockerizedDataLake.getTestingHadoop();
    return createAbfsDeltaLakeQueryRunner(DELTA_CATALOG, SCHEMA_NAME, testingHadoop);
}
Aggregations