Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class SessionHiveMetaStoreClient, method createTempTable.
private void createTempTable(org.apache.hadoop.hive.metastore.api.Table tbl, EnvironmentContext envContext) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException {
  boolean isVirtualTable = tbl.getTableName().startsWith(SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX);
  SessionState ss = SessionState.get();
  if (ss == null) {
    throw new MetaException("No current SessionState, cannot create temporary table: " + Warehouse.getQualifiedName(tbl));
  }
  // We may not own the table object, create a copy
  tbl = deepCopyAndLowerCaseTable(tbl);
  String dbName = tbl.getDbName();
  String tblName = tbl.getTableName();
  Map<String, Table> tables = getTempTablesForDatabase(dbName);
  if (tables != null && tables.containsKey(tblName)) {
    throw new MetaException("Temporary table " + StatsUtils.getFullyQualifiedTableName(dbName, tblName) + " already exists");
  }
  // Create temp table directory
  Warehouse wh = getWh();
  Path tblPath = wh.getDnsPath(new Path(tbl.getSd().getLocation()));
  if (tblPath == null) {
    throw new MetaException("Temp table path not set for " + tbl.getTableName());
  } else {
    if (!wh.isDir(tblPath)) {
      if (!wh.mkdirs(tblPath)) {
        throw new MetaException(tblPath + " is not a directory or unable to create one");
      }
    }
    // Make sure location string is in proper format
    tbl.getSd().setLocation(tblPath.toString());
  }
  // Add temp table info to current session
  Table tTable = new Table(tbl);
  if (!isVirtualTable) {
    StatsSetupConst.setStatsStateForCreateTable(tbl.getParameters(), org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getColumnNamesForTable(tbl), StatsSetupConst.TRUE);
  }
  if (tables == null) {
    tables = new HashMap<String, Table>();
    ss.getTempTables().put(dbName, tables);
  }
  tables.put(tblName, tTable);
}
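For context, a minimal sketch of how the session-scoped registry populated above can be read back. The helper name findTempTable is hypothetical, and the lowercasing of the lookup keys is an assumption mirroring the deepCopyAndLowerCaseTable call, not a documented contract:

// Hypothetical lookup against the per-session registry used by createTempTable.
// SessionState keeps temp tables as Map<dbName, Map<tableName, Table>>.
private static Table findTempTable(String dbName, String tblName) {
  SessionState ss = SessionState.get();
  if (ss == null) {
    return null; // no active session, so no temporary tables
  }
  Map<String, Table> tables = ss.getTempTables().get(dbName.toLowerCase());
  return tables == null ? null : tables.get(tblName.toLowerCase());
}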
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class TextMetaDataFormatter, method showTablePartitions.
/**
* Show the table partitions.
*/
@Override
public void showTablePartitions(DataOutputStream outStream, List<String> parts) throws HiveException {
  try {
    for (String part : parts) {
      // Partition names are URL encoded. We decode the names unless Hive
      // is configured to use the encoded names.
      SessionState ss = SessionState.get();
      if (ss != null && ss.getConf() != null && !ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_DECODE_PARTITION_NAME)) {
        outStream.write(part.getBytes("UTF-8"));
      } else {
        outStream.write(FileUtils.unescapePathName(part).getBytes("UTF-8"));
      }
      outStream.write(terminator);
    }
  } catch (IOException e) {
    throw new HiveException(e);
  }
}
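To make the branch above concrete, a small hedged example of setting the switch it reads; the sample partition value is illustrative, not taken from the source:

HiveConf conf = new HiveConf();
// When hive.decode.partition.name is true, escaped characters in partition
// names (e.g. "ts=12%3A00") are unescaped to "ts=12:00" before printing;
// when false, showTablePartitions writes the raw encoded names.
conf.setBoolVar(HiveConf.ConfVars.HIVE_DECODE_PARTITION_NAME, true);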
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class TestTxnCommands2, method setUpWithTableProperties.
void setUpWithTableProperties(String tableProperties) throws Exception {
  hiveConf = new HiveConf(this.getClass());
  hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
  hiveConf.setVar(HiveConf.ConfVars.HIVEINPUTFORMAT, HiveInputFormat.class.getName());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  hiveConf.setBoolVar(HiveConf.ConfVars.MERGE_CARDINALITY_VIOLATION_CHECK, true);
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVESTATSCOLAUTOGATHER, false);
  TxnDbUtil.setConfValues(hiveConf);
  TxnDbUtil.prepDb(hiveConf);
  File f = new File(TEST_WAREHOUSE_DIR);
  if (f.exists()) {
    FileUtil.fullyDelete(f);
  }
  if (!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
    throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
  }
  SessionState ss = SessionState.start(hiveConf);
  ss.applyAuthorizationPolicy();
  d = new Driver(new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build(), null);
  d.setMaxRows(10000);
  dropTables();
  runStatementOnDriver("create table " + Table.ACIDTBL + "(a int, b int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES (" + tableProperties + ")");
  runStatementOnDriver("create table " + Table.ACIDTBLPART + "(a int, b int) partitioned by (p string) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES (" + tableProperties + ")");
  runStatementOnDriver("create table " + Table.NONACIDORCTBL + "(a int, b int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES ('transactional'='false')");
  runStatementOnDriver("create table " + Table.NONACIDPART + "(a int, b int) partitioned by (p string) stored as orc TBLPROPERTIES ('transactional'='false')");
  runStatementOnDriver("create table " + Table.NONACIDPART2 + "(a2 int, b2 int) partitioned by (p2 string) stored as orc TBLPROPERTIES ('transactional'='false')");
  runStatementOnDriver("create table " + Table.ACIDNESTEDPART + "(a int, b int) partitioned by (p int, q int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES (" + tableProperties + ")");
  runStatementOnDriver("create table " + Table.MMTBL + "(a int, b int) TBLPROPERTIES ('transactional'='true', 'transactional_properties'='insert_only')");
}
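For reference, a caller passes the TBLPROPERTIES fragment that gets spliced into the CREATE TABLE statements above; a minimal sketch (the exact property string a given test supplies is an assumption):

@Before
public void setUp() throws Exception {
  // This fragment replaces the tableProperties placeholder, so the ACID
  // tables are created with 'transactional'='true'.
  setUpWithTableProperties("'transactional'='true'");
}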
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class TxnCommandsBaseForTests, method setUpInternal.
void setUpInternal() throws Exception {
  initHiveConf();
  hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, getWarehouseDir());
  hiveConf.setVar(HiveConf.ConfVars.HIVEINPUTFORMAT, HiveInputFormat.class.getName());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  hiveConf.setBoolVar(HiveConf.ConfVars.MERGE_CARDINALITY_VIOLATION_CHECK, true);
  hiveConf.setBoolVar(HiveConf.ConfVars.HIVESTATSCOLAUTOGATHER, false);
  TxnDbUtil.setConfValues(hiveConf);
  TxnDbUtil.prepDb(hiveConf);
  File f = new File(getWarehouseDir());
  if (f.exists()) {
    FileUtil.fullyDelete(f);
  }
  if (!(new File(getWarehouseDir()).mkdirs())) {
    throw new RuntimeException("Could not create " + getWarehouseDir());
  }
  SessionState ss = SessionState.start(hiveConf);
  ss.applyAuthorizationPolicy();
  d = new Driver(new QueryState.Builder().withHiveConf(hiveConf).nonIsolated().build(), null);
  d.setMaxRows(10000);
  dropTables();
  runStatementOnDriver("create table " + Table.ACIDTBL + "(a int, b int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES ('transactional'='true')");
  runStatementOnDriver("create table " + Table.ACIDTBLPART + "(a int, b int) partitioned by (p string) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES ('transactional'='true')");
  runStatementOnDriver("create table " + Table.NONACIDORCTBL + "(a int, b int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES ('transactional'='false')");
  runStatementOnDriver("create table " + Table.NONACIDORCTBL2 + "(a int, b int) clustered by (a) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES ('transactional'='false')");
  runStatementOnDriver("create temporary table " + Table.ACIDTBL2 + "(a int, b int, c int) clustered by (c) into " + BUCKET_COUNT + " buckets stored as orc TBLPROPERTIES ('transactional'='true')");
  runStatementOnDriver("create table " + Table.NONACIDNONBUCKET + "(a int, b int) stored as orc TBLPROPERTIES ('transactional'='false')");
}
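Because setUpInternal starts a SessionState and a Driver, a matching teardown is needed so that state does not leak between tests. A hedged sketch, built only from names used above; the teardown body itself is an assumption, not copied from the base class:

@After
public void tearDownInternal() throws Exception {
  try {
    if (d != null) {
      dropTables(); // same cleanup helper the setup calls before creating tables
      d.close();
      d.destroy();
      d = null;
    }
  } finally {
    // Release the session created by SessionState.start(hiveConf) above.
    if (SessionState.get() != null) {
      SessionState.get().close();
    }
  }
}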
Use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.
The class TestKeyWrapperFactory, method setup.
@Before
public void setup() throws Exception {
  SessionState ss = new SessionState(new HiveConf());
  SessionState.setCurrentSessionState(ss);
  ArrayList<Text> col1 = new ArrayList<Text>();
  col1.add(new Text("0"));
  col1.add(new Text("1"));
  col1.add(new Text("2"));
  col1.add(new Text("3"));
  TypeInfo col1Type = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
  ArrayList<Text> cola = new ArrayList<Text>();
  cola.add(new Text("a"));
  cola.add(new Text("b"));
  cola.add(new Text("c"));
  TypeInfo colaType = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
  try {
    ArrayList<Object> data = new ArrayList<Object>();
    data.add(col1);
    data.add(cola);
    ArrayList<String> names = new ArrayList<String>();
    names.add("col1");
    names.add("cola");
    ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();
    typeInfos.add(col1Type);
    typeInfos.add(colaType);
    TypeInfo dataType = TypeInfoFactory.getStructTypeInfo(names, typeInfos);
    InspectableObject r = new InspectableObject();
    ObjectInspector[] oi = new ObjectInspector[1];
    r.o = data;
    oi[0] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(dataType);
    try {
      // get an evaluator for a simple field expression
      ExprNodeDesc exprDesc = new ExprNodeColumnDesc(colaType, "cola", "", false);
      ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
      ExprNodeEvaluator[] evals = new ExprNodeEvaluator[1];
      evals[0] = eval;
      ObjectInspector resultOI = eval.initialize(oi[0]);
      ObjectInspector[] resultOIs = new ObjectInspector[1];
      resultOIs[0] = resultOI;
      factory = new KeyWrapperFactory(evals, oi, resultOIs);
    } catch (Throwable e) {
      e.printStackTrace();
      throw e;
    }
  } catch (Throwable e) {
    e.printStackTrace();
    throw new RuntimeException(e);
  }
}
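With the factory wired up in setup, a test can exercise the wrappers it produces. A minimal hedged sketch of a follow-on test; the method name and assertion are illustrative, not copied from the actual suite:

@Test
public void testCopiedKeyEqualsOriginal() throws Exception {
  KeyWrapper original = factory.getKeyWrapper();
  KeyWrapper copy = original.copyKey(); // deep copy intended for hash-map storage
  assertTrue(original.equals(copy));
}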