Example usage of com.facebook.presto.hive.HiveHadoop2Plugin in the prestodb/presto project.
From the class TestPrestoSparkLauncherIntegrationSmokeTest, method setUp:
// Provisions the full Presto-on-Spark integration environment before any test runs:
// a docker-shareable temp directory, a docker-compose cluster (1 Spark master,
// 2 Spark workers, 1 Hadoop master), a LocalQueryRunner with Hive and TPC-H
// catalogs, imported test tables, and the launcher/package artifacts plus the
// config and catalog property files the launcher under test will consume.
// NOTE(review): statement order matters throughout — docker must be up and DNS
// resolutions registered before the Hive catalog is created and queried.
@BeforeClass
public void setUp() throws Exception {
// Fail fast if the JVM timezone differs from the one the Hive test data assumes.
assertEquals(DateTimeZone.getDefault(), TIME_ZONE, "Timezone not configured correctly. Add -Duser.timezone=America/Bahia_Banderas to your JVM arguments");
// the default temporary directory location on MacOS is not sharable to docker
tempDir = new File("/tmp", randomUUID().toString());
createDirectories(tempDir.toPath());
// Working directory handed to the Spark containers.
sparkWorkDirectory = new File(tempDir, "work");
createDirectories(sparkWorkDirectory.toPath());
// The compose file ships as a classpath resource; materialize it where docker can read it.
File composeYaml = extractResource("docker-compose.yml", tempDir);
dockerCompose = new DockerCompose(composeYaml);
dockerCompose.verifyInstallation();
dockerCompose.pull();
composeProcess = dockerCompose.up(ImmutableMap.of("spark-master", 1, "spark-worker", 2, "hadoop-master", 1));
// Local query runner used to seed test data into the dockerized Hive metastore/HDFS.
Session session = testSessionBuilder().setCatalog("hive").setSchema("default").build();
localQueryRunner = new LocalQueryRunner(session);
HiveHadoop2Plugin plugin = new HiveHadoop2Plugin();
ConnectorFactory hiveConnectorFactory = getOnlyElement(plugin.getConnectorFactories());
// Map the container hostname to localhost so the host-side runner can reach the metastore.
addStaticResolution("hadoop-master", "127.0.0.1");
String hadoopMasterAddress = dockerCompose.getContainerAddress("hadoop-master");
// datanode is accessed via the internal docker IP address that is not accessible from the host
addStaticResolution(hadoopMasterAddress, "127.0.0.1");
localQueryRunner.createCatalog("hive", hiveConnectorFactory, ImmutableMap.of("hive.metastore.uri", "thrift://127.0.0.1:9083", "hive.time-zone", TIME_ZONE.getID(), "hive.experimental-optimized-partition-update-serialization-enabled", "true"));
localQueryRunner.createCatalog("tpch", new TpchConnectorFactory(), ImmutableMap.of());
// it may take some time for the docker container to start
ensureHiveIsRunning(localQueryRunner, new Duration(10, MINUTES));
// Seed both plain and bucketed copies of the TPC-H tables used by the smoke tests.
importTables(localQueryRunner, "lineitem", "orders");
importTablesBucketed(localQueryRunner, ImmutableList.of("orderkey"), "lineitem", "orders");
// Locate the built launcher jar and package tarball under the Maven target directories.
File projectRoot = resolveProjectRoot();
prestoLauncher = resolveFile(new File(projectRoot, "presto-spark-launcher/target"), Pattern.compile("presto-spark-launcher-[\\d\\.]+(-SNAPSHOT)?\\.jar"));
logPackageInfo(prestoLauncher);
prestoPackage = resolveFile(new File(projectRoot, "presto-spark-package/target"), Pattern.compile("presto-spark-package-.+\\.tar\\.gz"));
logPackageInfo(prestoPackage);
// Configuration consumed by the launched Presto-on-Spark job (not by localQueryRunner).
configProperties = new File(tempDir, "config.properties");
storeProperties(configProperties, ImmutableMap.of("query.hash-partition-count", "10"));
catalogDirectory = new File(tempDir, "catalogs");
createDirectories(catalogDirectory.toPath());
// Inside the containers the metastore is reachable by its compose hostname, not 127.0.0.1.
storeProperties(new File(catalogDirectory, "hive.properties"), ImmutableMap.of("connector.name", "hive-hadoop2", "hive.metastore.uri", "thrift://hadoop-master:9083", // the getnetgrent dependency is missing
"hive.dfs.require-hadoop-native", "false", "hive.time-zone", TIME_ZONE.getID()));
storeProperties(new File(catalogDirectory, "tpch.properties"), ImmutableMap.of("connector.name", "tpch", "tpch.splits-per-node", "4", "tpch.partitioning-enabled", "false"));
}
Example usage of com.facebook.presto.hive.HiveHadoop2Plugin in the prestodb/presto project.
From the class TestJdbcConnection, method setupServer:
// Boots a TestingPrestoServer with a file-based Hive catalog and the
// extra-credentials system table, then pre-creates the schemas the JDBC
// connection tests expect ("default" and "fruit").
@BeforeClass
public void setupServer() throws Exception {
    Logging.initialize();

    // Contribute the extra-credentials system table via a Guice set multibinding.
    Module extraTablesModule = binder -> newSetBinder(binder, SystemTable.class)
            .addBinding()
            .to(ExtraCredentialsSystemTable.class)
            .in(Scopes.SINGLETON);
    server = new TestingPrestoServer(ImmutableList.of(extraTablesModule));

    // File-based metastore rooted under the server's data directory; no external Hive needed.
    server.installPlugin(new HiveHadoop2Plugin());
    String metastoreDir = server.getBaseDataDir().resolve("hive").toFile().toURI().toString();
    server.createCatalog(
            "hive",
            "hive-hadoop2",
            ImmutableMap.of(
                    "hive.metastore", "file",
                    "hive.metastore.catalog.dir", metastoreDir,
                    "hive.security", "sql-standard"));

    // sql-standard security requires the admin role to create schemas.
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement()) {
        statement.execute("SET ROLE admin");
        statement.execute("CREATE SCHEMA default");
        statement.execute("CREATE SCHEMA fruit");
    }
}
Aggregations