Use of org.apache.flink.table.catalog.hive.HiveCatalog in project flink by apache.
The class HiveCatalogFactoryTest, method testLoadHadoopConfigFromEnv.
@Test
public void testLoadHadoopConfigFromEnv() throws IOException {
    Map<String, String> customProps = new HashMap<>();
    String k1 = "what is connector?";
    String v1 = "Hive";
    final String catalogName = "HiveCatalog";

    // set up a HADOOP_CONF_DIR with an hdfs-site file
    final File hadoopConfDir = tempFolder.newFolder();
    final File hdfsSiteFile = new File(hadoopConfDir, "hdfs-site.xml");
    writeProperty(hdfsSiteFile, k1, v1);
    customProps.put(k1, v1);

    // add a mapred-site file
    final File mapredSiteFile = new File(hadoopConfDir, "mapred-site.xml");
    k1 = "mapred.site.config.key";
    v1 = "mapred.site.config.val";
    writeProperty(mapredSiteFile, k1, v1);
    customProps.put(k1, v1);

    // point the HADOOP_CONF_DIR env variable at that folder
    final Map<String, String> originalEnv = System.getenv();
    final Map<String, String> newEnv = new HashMap<>(originalEnv);
    newEnv.put("HADOOP_CONF_DIR", hadoopConfDir.getAbsolutePath());
    newEnv.remove("HADOOP_HOME");
    CommonTestUtils.setEnv(newEnv);

    // create a HiveCatalog using the Hadoop configuration
    final HiveConf hiveConf;
    try {
        final Map<String, String> options = new HashMap<>();
        options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
        options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());
        final HiveCatalog hiveCatalog = (HiveCatalog) FactoryUtil.createCatalog(
                catalogName, options, null, Thread.currentThread().getContextClassLoader());
        hiveConf = hiveCatalog.getHiveConf();
    } finally {
        // restore the original environment
        CommonTestUtils.setEnv(originalEnv);
    }

    // validate that the custom properties made it into the HiveConf
    for (String key : customProps.keySet()) {
        assertEquals(customProps.get(key), hiveConf.get(key, null));
    }
}
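The test relies on a writeProperty helper that this excerpt does not show. A minimal sketch of what it presumably does, writing a single key/value pair as a Hadoop-style configuration XML file (the exact implementation in HiveCatalogFactoryTest may differ):

// Hypothetical sketch of the writeProperty helper used above;
// requires java.io.{File, FileOutputStream, PrintStream, IOException}.
private static void writeProperty(File file, String key, String value) throws IOException {
    try (PrintStream out = new PrintStream(new FileOutputStream(file))) {
        out.println("<?xml version=\"1.0\"?>");
        out.println("<configuration>");
        out.println("  <property>");
        out.println("    <name>" + key + "</name>");
        out.println("    <value>" + value + "</value>");
        out.println("  </property>");
        out.println("</configuration>");
    }
}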
Use of org.apache.flink.table.catalog.hive.HiveCatalog in project flink by apache.
The class HiveCatalogFactoryTest, method testCreateMultipleHiveCatalog.
@Test
public void testCreateMultipleHiveCatalog() throws Exception {
    final Map<String, String> props1 = new HashMap<>();
    props1.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    props1.put(
            HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(),
            Thread.currentThread().getContextClassLoader().getResource("test-multi-hive-conf1").getPath());

    final Map<String, String> props2 = new HashMap<>();
    props2.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    props2.put(
            HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(),
            Thread.currentThread().getContextClassLoader().getResource("test-multi-hive-conf2").getPath());

    Callable<Catalog> callable1 = () -> FactoryUtil.createCatalog(
            "cat1", props1, null, Thread.currentThread().getContextClassLoader());
    Callable<Catalog> callable2 = () -> FactoryUtil.createCatalog(
            "cat2", props2, null, Thread.currentThread().getContextClassLoader());

    ExecutorService executorService = Executors.newFixedThreadPool(2);
    Future<Catalog> future1 = executorService.submit(callable1);
    Future<Catalog> future2 = executorService.submit(callable2);
    executorService.shutdown();

    HiveCatalog catalog1 = (HiveCatalog) future1.get();
    HiveCatalog catalog2 = (HiveCatalog) future2.get();

    // verify we read our own props
    assertEquals("val1", catalog1.getHiveConf().get("key"));
    assertNotNull(catalog1.getHiveConf().get("conf1", null));
    // verify we don't read props from other conf
    assertNull(catalog1.getHiveConf().get("conf2", null));

    // verify we read our own props
    assertEquals("val2", catalog2.getHiveConf().get("key"));
    assertNotNull(catalog2.getHiveConf().get("conf2", null));
    // verify we don't read props from other conf
    assertNull(catalog2.getHiveConf().get("conf1", null));
}
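The two classpath directories test-multi-hive-conf1 and test-multi-hive-conf2 are not shown in this excerpt. Judging from the assertions, each presumably holds a hive-site.xml along these lines (property names inferred from the assertions; the conf1 value is a placeholder):

// Hypothetical content of test-multi-hive-conf1/hive-site.xml;
// test-multi-hive-conf2 would mirror it with "val2" and a "conf2" property.
String hiveSiteConf1 =
        "<?xml version=\"1.0\"?>\n"
        + "<configuration>\n"
        + "  <property><name>key</name><value>val1</value></property>\n"
        + "  <property><name>conf1</name><value>dummy</value></property>\n"
        + "</configuration>\n";

Creating the two catalogs on separate threads checks that loading one hive-site.xml does not leak into the other catalog's HiveConf, which matters because HiveConf has historically located hive-site.xml through static state.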
Use of org.apache.flink.table.catalog.hive.HiveCatalog in project flink by apache.
The class HiveCatalogFactoryTest, method testCreateHiveCatalog.
@Test
public void testCreateHiveCatalog() {
    final String catalogName = "mycatalog";
    final HiveCatalog expectedCatalog = HiveTestUtils.createHiveCatalog(catalogName, null);

    final Map<String, String> options = new HashMap<>();
    options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());

    final Catalog actualCatalog = FactoryUtil.createCatalog(
            catalogName, options, null, Thread.currentThread().getContextClassLoader());

    assertEquals(
            "dummy-hms",
            ((HiveCatalog) actualCatalog).getHiveConf().getVar(HiveConf.ConfVars.METASTOREURIS));
    checkEquals(expectedCatalog, (HiveCatalog) actualCatalog);
}
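checkEquals is a helper of the test class that this excerpt omits. A minimal sketch, assuming it compares the externally observable properties of the two catalogs (the actual test may compare more or different fields):

// Hypothetical sketch of the checkEquals helper used above.
private static void checkEquals(HiveCatalog c1, HiveCatalog c2) {
    assertEquals(c1.getName(), c2.getName());
    assertEquals(c1.getDefaultDatabase(), c2.getDefaultDatabase());
}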
Use of org.apache.flink.table.catalog.hive.HiveCatalog in project flink by apache.
The class HiveCatalogFactoryTest, method testCreateHiveCatalogWithHadoopConfDir.
@Test
public void testCreateHiveCatalogWithHadoopConfDir() throws IOException {
    final String catalogName = "mycatalog";

    final String hadoopConfDir = tempFolder.newFolder().getAbsolutePath();
    final File mapredSiteFile = new File(hadoopConfDir, "mapred-site.xml");
    final String mapredKey = "mapred.site.config.key";
    final String mapredVal = "mapred.site.config.val";
    writeProperty(mapredSiteFile, mapredKey, mapredVal);

    final HiveCatalog expectedCatalog =
            HiveTestUtils.createHiveCatalog(catalogName, CONF_DIR.getPath(), hadoopConfDir, null);

    final Map<String, String> options = new HashMap<>();
    options.put(CommonCatalogOptions.CATALOG_TYPE.key(), HiveCatalogFactoryOptions.IDENTIFIER);
    options.put(HiveCatalogFactoryOptions.HIVE_CONF_DIR.key(), CONF_DIR.getPath());
    options.put(HiveCatalogFactoryOptions.HADOOP_CONF_DIR.key(), hadoopConfDir);

    final Catalog actualCatalog = FactoryUtil.createCatalog(
            catalogName, options, null, Thread.currentThread().getContextClassLoader());

    checkEquals(expectedCatalog, (HiveCatalog) actualCatalog);
    assertEquals(mapredVal, ((HiveCatalog) actualCatalog).getHiveConf().get(mapredKey));
}
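Outside of a unit test, the same hive-conf-dir and hadoop-conf-dir options are usually supplied through DDL rather than by calling FactoryUtil directly. A minimal sketch; both paths are placeholders, and the EnvironmentSettings builder call assumes a reasonably recent Flink version:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class RegisterHiveCatalogSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build());
        // 'type' = 'hive' corresponds to HiveCatalogFactoryOptions.IDENTIFIER;
        // the two conf-dir paths below are placeholders.
        tEnv.executeSql(
                "CREATE CATALOG mycatalog WITH (\n"
                        + "  'type' = 'hive',\n"
                        + "  'hive-conf-dir' = '/path/to/hive-conf',\n"
                        + "  'hadoop-conf-dir' = '/path/to/hadoop-conf'\n"
                        + ")");
        tEnv.useCatalog("mycatalog");
    }
}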
Use of org.apache.flink.table.catalog.hive.HiveCatalog in project flink by apache.
The class HiveTableSourceITCase, method testSourceConfig.
private void testSourceConfig(boolean fallbackMR, boolean inferParallelism) throws Exception {
    HiveDynamicTableFactory tableFactorySpy = spy((HiveDynamicTableFactory) hiveCatalog.getFactory().get());
    doAnswer(invocation -> {
        TableSourceFactory.Context context = invocation.getArgument(0);
        assertEquals(fallbackMR, context.getConfiguration().get(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER));
        return new TestConfigSource(
                new JobConf(hiveCatalog.getHiveConf()),
                context.getConfiguration(),
                context.getObjectIdentifier().toObjectPath(),
                context.getTable(),
                inferParallelism);
    }).when(tableFactorySpy).createDynamicTableSource(any(DynamicTableFactory.Context.class));

    HiveCatalog catalogSpy = spy(hiveCatalog);
    doReturn(Optional.of(tableFactorySpy)).when(catalogSpy).getTableFactory();

    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode();
    tableEnv.getConfig().getConfiguration().setBoolean(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER, fallbackMR);
    tableEnv.getConfig().getConfiguration().setBoolean(HiveOptions.TABLE_EXEC_HIVE_INFER_SOURCE_PARALLELISM, inferParallelism);
    tableEnv.getConfig().getConfiguration().setInteger(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 2);
    tableEnv.registerCatalog(catalogSpy.getName(), catalogSpy);
    tableEnv.useCatalog(catalogSpy.getName());

    List<Row> results = CollectionUtil.iteratorToList(
            tableEnv.sqlQuery("select * from db1.src order by x").execute().collect());
    assertEquals("[+I[1, a], +I[2, b]]", results.toString());
}
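The spy/doAnswer pattern above intercepts a single factory method, inspects its argument, and substitutes a test double while the rest of the spied object behaves normally. A self-contained sketch of the same Mockito pattern on a plain list (nothing Flink-specific):

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.spy;

import java.util.ArrayList;
import java.util.List;

public class SpyPatternSketch {
    public static void main(String[] args) {
        List<String> spyList = spy(new ArrayList<String>());
        // Stub one method on the spy: inspect the argument, then delegate to
        // the real implementation (the test above returns a fake source instead).
        doAnswer(invocation -> {
            String arg = invocation.getArgument(0);
            System.out.println("intercepted add(" + arg + ")");
            return invocation.callRealMethod();
        }).when(spyList).add(anyString());
        spyList.add("x"); // prints "intercepted add(x)" and really adds the element
    }
}

Note that doAnswer(...).when(spy).method(...) is the form required for spies: writing when(spy.method(...)) would invoke the real method during stubbing.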