Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project, taken from the class TestColumnAccess, method createDriver.
/**
 * Creates a {@link Driver} backed by a {@link HiveConf} configured for the
 * column-access tests: SQL-standard authorization enabled, concurrency
 * support disabled, and scan-column statistics collection turned on.
 *
 * @return an initialized Driver ready to compile/run test queries
 */
private static Driver createDriver() {
HiveConf conf = new HiveConf(Driver.class);
conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
// Use the instance setter consistently with the other conf.set* calls above/below
// (the static HiveConf.setBoolVar(conf, ...) form is equivalent).
conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
conf.setBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS, true);
// SessionState must be started before the Driver is constructed.
SessionState.start(conf);
Driver driver = new Driver(conf);
driver.init();
return driver;
}
Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project, taken from the class TestGenTezWork, method setUp.
/**
 * Builds a minimal TS -> RS -> FS operator pipeline plus a fresh
 * {@code GenTezProcContext} so each test can exercise {@code GenTezWork}
 * in isolation. The {@code setupMapWork} override stubs out partition
 * handling and simply registers the root operator under the alias "foo".
 *
 * @throws java.lang.Exception
 */
@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
// Init conf
final HiveConf conf = new HiveConf(SemanticAnalyzer.class);
SessionState.start(conf);
// Init parse context
final ParseContext pctx = new ParseContext();
pctx.setContext(new Context(conf));
ctx = new GenTezProcContext(conf, pctx, Collections.EMPTY_LIST, new ArrayList<Task<? extends Serializable>>(), Collections.EMPTY_SET, Collections.EMPTY_SET);
// Stub GenTezUtils: skip real partition pruning/path setup and just map
// the alias "foo" to the pipeline's root operator.
proc = new GenTezWork(new GenTezUtils() {
@Override
protected void setupMapWork(MapWork mapWork, GenTezProcContext context, PrunedPartitionList partitions, TableScanOperator root, String alias) throws SemanticException {
LinkedHashMap<String, Operator<? extends OperatorDesc>> map = new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
map.put("foo", root);
mapWork.setAliasToWork(map);
return;
}
});
// Create the three operators sharing one compilation context.
CompilationOpContext cCtx = new CompilationOpContext();
fs = new FileSinkOperator(cCtx);
fs.setConf(new FileSinkDesc());
rs = new ReduceSinkOperator(cCtx);
rs.setConf(new ReduceSinkDesc());
// RS needs a (minimal) key serialization descriptor to be usable.
TableDesc tableDesc = new TableDesc();
tableDesc.setProperties(new Properties());
rs.getConf().setKeySerializeInfo(tableDesc);
ts = new TableScanOperator(cCtx);
ts.setConf(new TableScanDesc(null));
// Wire the DAG in both directions: ts -> rs -> fs.
ts.getChildOperators().add(rs);
rs.getParentOperators().add(ts);
rs.getChildOperators().add(fs);
fs.getParentOperators().add(rs);
// Start each test at the table scan with no preceding work.
ctx.preceedingWork = null;
ctx.currentRootOperator = ts;
}
Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project, taken from the class TestConditionalResolverCommonJoin, method testResolvingDriverAlias.
/**
 * Verifies that {@code ConditionalResolverCommonJoin} picks the correct
 * driver task for three join aliases under varying small-table size
 * thresholds: only alias3 qualifies at 4096, the biggest qualifying alias
 * (alias3) wins at 65536, and nothing resolves at 2048.
 */
@Test
public void testResolvingDriverAlias() throws Exception {
ConditionalResolverCommonJoin resolver = new ConditionalResolverCommonJoin();
HashMap<Path, ArrayList<String>> pathToAliases = new HashMap<>();
pathToAliases.put(new Path("path1"), new ArrayList<String>(Arrays.asList("alias1", "alias2")));
pathToAliases.put(new Path("path2"), new ArrayList<String>(Arrays.asList("alias3")));
HashMap<String, Long> aliasToKnownSize = new HashMap<String, Long>();
// Use uppercase 'L' for long literals: a lowercase 'l' is easily misread as '1'.
aliasToKnownSize.put("alias1", 1024L);
aliasToKnownSize.put("alias2", 2048L);
aliasToKnownSize.put("alias3", 4096L);
DDLTask task1 = new DDLTask();
task1.setId("alias2");
DDLTask task2 = new DDLTask();
task2.setId("alias3");
// joins alias1, alias2, alias3 (alias1 was not eligible for big pos)
// Must be deterministic order map for consistent q-test output across Java versions
HashMap<Task<? extends Serializable>, Set<String>> taskToAliases = new LinkedHashMap<Task<? extends Serializable>, Set<String>>();
taskToAliases.put(task1, new HashSet<String>(Arrays.asList("alias2")));
taskToAliases.put(task2, new HashSet<String>(Arrays.asList("alias3")));
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx ctx = new ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx();
ctx.setPathToAliases(pathToAliases);
ctx.setTaskToAliases(taskToAliases);
ctx.setAliasToKnownSize(aliasToKnownSize);
HiveConf conf = new HiveConf();
conf.setLongVar(HiveConf.ConfVars.HIVESMALLTABLESFILESIZE, 4096);
// alias3 only can be selected
Task resolved = resolver.resolveMapJoinTask(ctx, conf);
Assert.assertEquals("alias3", resolved.getId());
conf.setLongVar(HiveConf.ConfVars.HIVESMALLTABLESFILESIZE, 65536);
// alias1, alias2, alias3 all can be selected but overridden by biggest one (alias3)
resolved = resolver.resolveMapJoinTask(ctx, conf);
Assert.assertEquals("alias3", resolved.getId());
conf.setLongVar(HiveConf.ConfVars.HIVESMALLTABLESFILESIZE, 2048);
// not selected
resolved = resolver.resolveMapJoinTask(ctx, conf);
Assert.assertNull(resolved);
}
Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project, taken from the class TestReadEntityDirect, method createDriver.
/**
 * Creates a {@link Driver} with the test semantic-analyzer hook
 * ({@code CheckInputReadEntityDirect}) installed in the config, SQL-standard
 * authorization enabled, and concurrency support disabled.
 *
 * @return an initialized Driver ready to compile/run test queries
 */
private static Driver createDriver() {
HiveConf conf = new HiveConf(Driver.class);
conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, CheckInputReadEntityDirect.class.getName());
// Use the instance setter consistently with the conf.setVar calls above
// (the static HiveConf.setBoolVar(conf, ...) form is equivalent).
conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
// SessionState must be started before the Driver is constructed.
SessionState.start(conf);
Driver driver = new Driver(conf);
driver.init();
return driver;
}
Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project, taken from the class TestViewEntity, method onetimeSetup.
/**
 * One-time class setup: builds a HiveConf with SQL-standard authorization,
 * the {@code CheckInputReadEntity} semantic-analyzer hook, and concurrency
 * disabled, then starts a session and initializes the shared driver.
 */
@BeforeClass
public static void onetimeSetup() throws Exception {
final HiveConf hiveConf = new HiveConf(Driver.class);
hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
hiveConf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, CheckInputReadEntity.class.getName());
HiveConf.setBoolVar(hiveConf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
// The session has to be up before a Driver can be created.
SessionState.start(hiveConf);
driver = new Driver(hiveConf);
driver.init();
}
Aggregations