use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
the class FSDatabaseEvent method dbInMetadata.
@Override
public Database dbInMetadata(String dbNameToOverride) throws SemanticException {
  try {
    MetaData rv = EximUtil.readMetaData(fileSystem, dbMetadataFile);
    Database dbObj = rv.getDatabase();
    if (dbObj == null) {
      throw new IllegalArgumentException("_metadata file read did not contain a db object - invalid dump.");
    }
    // override the db name if provided in repl load command
    if (StringUtils.isNotBlank(dbNameToOverride)) {
      dbObj.setName(dbNameToOverride);
    }
    return dbObj;
  } catch (Exception e) {
    throw new SemanticException(e);
  }
}
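For orientation, the override rule above can be isolated into a small self-contained sketch: a non-blank database name supplied on the REPL LOAD command replaces the name recorded in the dump's _metadata file, otherwise the dumped name is kept. The class and method names below are illustrative, not part of Hive.
import org.apache.hadoop.hive.metastore.api.Database;

class DbNameOverrideSketch {
  // Mirrors dbInMetadata's override behaviour without the file read or the
  // SemanticException wrapping; `fromDump` stands in for the Database parsed
  // out of the dump's _metadata file.
  static Database applyOverride(Database fromDump, String dbNameToOverride) {
    if (dbNameToOverride != null && !dbNameToOverride.trim().isEmpty()) {
      fromDump.setName(dbNameToOverride);
    }
    return fromDump;
  }
}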
use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
the class ColumnStatsUpdateTask method constructColumnStatsFromInput.
private ColumnStatistics constructColumnStatsFromInput() throws SemanticException, MetaException {
  String dbName = work.dbName();
  String tableName = work.getTableName();
  String partName = work.getPartName();
  String colName = work.getColName();
  String columnType = work.getColType();
  ColumnStatisticsObj statsObj = new ColumnStatisticsObj();
  // grammar prohibits more than 1 column, so we are guaranteed to have only 1
  // element in this list.
  statsObj.setColName(colName);
  statsObj.setColType(columnType);
  ColumnStatisticsData statsData = new ColumnStatisticsData();
  if (columnType.equalsIgnoreCase("long") || columnType.equalsIgnoreCase("tinyint")
      || columnType.equalsIgnoreCase("smallint") || columnType.equalsIgnoreCase("int")
      || columnType.equalsIgnoreCase("bigint") || columnType.equalsIgnoreCase("timestamp")) {
    LongColumnStatsDataInspector longStats = new LongColumnStatsDataInspector();
    longStats.setNumNullsIsSet(false);
    longStats.setNumDVsIsSet(false);
    longStats.setLowValueIsSet(false);
    longStats.setHighValueIsSet(false);
    Map<String, String> mapProp = work.getMapProp();
    for (Entry<String, String> entry : mapProp.entrySet()) {
      String fName = entry.getKey();
      String value = entry.getValue();
      if (fName.equals("numNulls")) {
        longStats.setNumNulls(Long.parseLong(value));
      } else if (fName.equals("numDVs")) {
        longStats.setNumDVs(Long.parseLong(value));
      } else if (fName.equals("lowValue")) {
        longStats.setLowValue(Long.parseLong(value));
      } else if (fName.equals("highValue")) {
        longStats.setHighValue(Long.parseLong(value));
      } else {
        throw new SemanticException("Unknown stat");
      }
    }
    statsData.setLongStats(longStats);
    statsObj.setStatsData(statsData);
  } else if (columnType.equalsIgnoreCase("double") || columnType.equalsIgnoreCase("float")) {
    DoubleColumnStatsDataInspector doubleStats = new DoubleColumnStatsDataInspector();
    doubleStats.setNumNullsIsSet(false);
    doubleStats.setNumDVsIsSet(false);
    doubleStats.setLowValueIsSet(false);
    doubleStats.setHighValueIsSet(false);
    Map<String, String> mapProp = work.getMapProp();
    for (Entry<String, String> entry : mapProp.entrySet()) {
      String fName = entry.getKey();
      String value = entry.getValue();
      if (fName.equals("numNulls")) {
        doubleStats.setNumNulls(Long.parseLong(value));
      } else if (fName.equals("numDVs")) {
        doubleStats.setNumDVs(Long.parseLong(value));
      } else if (fName.equals("lowValue")) {
        doubleStats.setLowValue(Double.parseDouble(value));
      } else if (fName.equals("highValue")) {
        doubleStats.setHighValue(Double.parseDouble(value));
      } else {
        throw new SemanticException("Unknown stat");
      }
    }
    statsData.setDoubleStats(doubleStats);
    statsObj.setStatsData(statsData);
  } else if (columnType.equalsIgnoreCase("string") || columnType.toLowerCase().startsWith("char")
      || columnType.toLowerCase().startsWith("varchar")) {
    // char(x), varchar(x) types
    StringColumnStatsDataInspector stringStats = new StringColumnStatsDataInspector();
    stringStats.setMaxColLenIsSet(false);
    stringStats.setAvgColLenIsSet(false);
    stringStats.setNumNullsIsSet(false);
    stringStats.setNumDVsIsSet(false);
    Map<String, String> mapProp = work.getMapProp();
    for (Entry<String, String> entry : mapProp.entrySet()) {
      String fName = entry.getKey();
      String value = entry.getValue();
      if (fName.equals("numNulls")) {
        stringStats.setNumNulls(Long.parseLong(value));
      } else if (fName.equals("numDVs")) {
        stringStats.setNumDVs(Long.parseLong(value));
      } else if (fName.equals("avgColLen")) {
        stringStats.setAvgColLen(Double.parseDouble(value));
      } else if (fName.equals("maxColLen")) {
        stringStats.setMaxColLen(Long.parseLong(value));
      } else {
        throw new SemanticException("Unknown stat");
      }
    }
    statsData.setStringStats(stringStats);
    statsObj.setStatsData(statsData);
  } else if (columnType.equalsIgnoreCase("boolean")) {
    BooleanColumnStatsData booleanStats = new BooleanColumnStatsData();
    booleanStats.setNumNullsIsSet(false);
    booleanStats.setNumTruesIsSet(false);
    booleanStats.setNumFalsesIsSet(false);
    Map<String, String> mapProp = work.getMapProp();
    for (Entry<String, String> entry : mapProp.entrySet()) {
      String fName = entry.getKey();
      String value = entry.getValue();
      if (fName.equals("numNulls")) {
        booleanStats.setNumNulls(Long.parseLong(value));
      } else if (fName.equals("numTrues")) {
        booleanStats.setNumTrues(Long.parseLong(value));
      } else if (fName.equals("numFalses")) {
        booleanStats.setNumFalses(Long.parseLong(value));
      } else {
        throw new SemanticException("Unknown stat");
      }
    }
    statsData.setBooleanStats(booleanStats);
    statsObj.setStatsData(statsData);
  } else if (columnType.equalsIgnoreCase("binary")) {
    BinaryColumnStatsData binaryStats = new BinaryColumnStatsData();
    binaryStats.setNumNullsIsSet(false);
    binaryStats.setAvgColLenIsSet(false);
    binaryStats.setMaxColLenIsSet(false);
    Map<String, String> mapProp = work.getMapProp();
    for (Entry<String, String> entry : mapProp.entrySet()) {
      String fName = entry.getKey();
      String value = entry.getValue();
      if (fName.equals("numNulls")) {
        binaryStats.setNumNulls(Long.parseLong(value));
      } else if (fName.equals("avgColLen")) {
        binaryStats.setAvgColLen(Double.parseDouble(value));
      } else if (fName.equals("maxColLen")) {
        binaryStats.setMaxColLen(Long.parseLong(value));
      } else {
        throw new SemanticException("Unknown stat");
      }
    }
    statsData.setBinaryStats(binaryStats);
    statsObj.setStatsData(statsData);
  } else if (columnType.toLowerCase().startsWith("decimal")) {
    // decimal(a,b) type
    DecimalColumnStatsDataInspector decimalStats = new DecimalColumnStatsDataInspector();
    decimalStats.setNumNullsIsSet(false);
    decimalStats.setNumDVsIsSet(false);
    decimalStats.setLowValueIsSet(false);
    decimalStats.setHighValueIsSet(false);
    Map<String, String> mapProp = work.getMapProp();
    for (Entry<String, String> entry : mapProp.entrySet()) {
      String fName = entry.getKey();
      String value = entry.getValue();
      if (fName.equals("numNulls")) {
        decimalStats.setNumNulls(Long.parseLong(value));
      } else if (fName.equals("numDVs")) {
        decimalStats.setNumDVs(Long.parseLong(value));
      } else if (fName.equals("lowValue")) {
        BigDecimal d = new BigDecimal(value);
        decimalStats.setLowValue(new Decimal(ByteBuffer.wrap(d.unscaledValue().toByteArray()), (short) d.scale()));
      } else if (fName.equals("highValue")) {
        BigDecimal d = new BigDecimal(value);
        decimalStats.setHighValue(new Decimal(ByteBuffer.wrap(d.unscaledValue().toByteArray()), (short) d.scale()));
      } else {
        throw new SemanticException("Unknown stat");
      }
    }
    statsData.setDecimalStats(decimalStats);
    statsObj.setStatsData(statsData);
  } else if (columnType.equalsIgnoreCase("date")) {
    DateColumnStatsDataInspector dateStats = new DateColumnStatsDataInspector();
    Map<String, String> mapProp = work.getMapProp();
    for (Entry<String, String> entry : mapProp.entrySet()) {
      String fName = entry.getKey();
      String value = entry.getValue();
      if (fName.equals("numNulls")) {
        dateStats.setNumNulls(Long.parseLong(value));
      } else if (fName.equals("numDVs")) {
        dateStats.setNumDVs(Long.parseLong(value));
      } else if (fName.equals("lowValue")) {
        // Date high/low value is stored as long in stats DB, but allow users to set high/low
        // value using either date format (yyyy-mm-dd) or numeric format (days since epoch)
        dateStats.setLowValue(readDateValue(value));
      } else if (fName.equals("highValue")) {
        dateStats.setHighValue(readDateValue(value));
      } else {
        throw new SemanticException("Unknown stat");
      }
    }
    statsData.setDateStats(dateStats);
    statsObj.setStatsData(statsData);
  } else {
    throw new SemanticException("Unsupported type");
  }
  ColumnStatisticsDesc statsDesc = getColumnStatsDesc(dbName, tableName, partName, partName == null);
  ColumnStatistics colStat = new ColumnStatistics();
  colStat.setStatsDesc(statsDesc);
  colStat.addToStatsObj(statsObj);
  return colStat;
}
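The mapProp keys handled above ("numNulls", "numDVs", "lowValue", "highValue", and so on) typically arrive from the key/value list of an ALTER TABLE ... UPDATE STATISTICS FOR COLUMN ... SET (...) statement. Below is a hedged, self-contained sketch of just the long-family branch, building the same Thrift objects directly; the helper name and the hard-coded "bigint" type are illustrative, and it throws IllegalArgumentException instead of SemanticException to stay standalone.
import java.util.Map;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;

class LongColumnStatsSketch {
  // Illustrative only: mirrors the "long" branch above without the Inspector subclass.
  static ColumnStatisticsObj longStatsFor(String colName, Map<String, String> props) {
    LongColumnStatsData longStats = new LongColumnStatsData();
    for (Map.Entry<String, String> e : props.entrySet()) {
      switch (e.getKey()) {
        case "numNulls":  longStats.setNumNulls(Long.parseLong(e.getValue()));  break;
        case "numDVs":    longStats.setNumDVs(Long.parseLong(e.getValue()));    break;
        case "lowValue":  longStats.setLowValue(Long.parseLong(e.getValue()));  break;
        case "highValue": longStats.setHighValue(Long.parseLong(e.getValue())); break;
        default: throw new IllegalArgumentException("Unknown stat: " + e.getKey());
      }
    }
    ColumnStatisticsData data = new ColumnStatisticsData();
    data.setLongStats(longStats);
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName(colName);
    obj.setColType("bigint");
    obj.setStatsData(data);
    return obj;
  }
}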
use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
the class SortMergeJoinTaskDispatcher method convertSMBWorkToJoinWork.
/*
 * Convert the work containing the sort-merge join into a work as if it had a regular join.
 * Note that the operator tree is not changed - it still contains the SMB join - but the
 * plan is changed (aliasToWork etc.) to contain all the paths as if it were a regular join.
 */
private MapredWork convertSMBWorkToJoinWork(MapredWork currWork, SMBMapJoinOperator oldSMBJoinOp) throws SemanticException {
  try {
    // deep copy a new mapred work
    MapredWork currJoinWork = SerializationUtilities.clonePlan(currWork);
    SMBMapJoinOperator newSMBJoinOp = getSMBMapJoinOp(currJoinWork);
    // change the newly created map-red plan as if it was a join operator
    genSMBJoinWork(currJoinWork.getMapWork(), newSMBJoinOp);
    return currJoinWork;
  } catch (Exception e) {
    e.printStackTrace();
    throw new SemanticException("Generate Map Join Task Error: " + e.getMessage());
  }
}
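The key move here is cloning the plan before rewriting it, so the original sort-merge plan remains intact as a fallback while the dispatcher builds the join variant. A minimal sketch of that clone-then-mutate pattern, with the rewrite step elided and the helper name invented for illustration; the import paths are written from memory.
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.plan.MapredWork;

class CloneThenMutateSketch {
  // Returns a deep copy that is safe to rewrite, leaving the caller's plan untouched.
  static MapredWork copyForRewrite(MapredWork original) {
    MapredWork copy = SerializationUtilities.clonePlan(original);
    // ... rewrite only `copy` (aliasToWork, path-to-alias mappings, etc.) ...
    return copy;
  }
}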
use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
the class CreateDatabaseHook method preAnalyze.
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
  Hive db;
  try {
    db = context.getHive();
  } catch (HiveException e) {
    throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
  }
  // Analyze and create tbl properties object
  int numCh = ast.getChildCount();
  databaseName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast.getChild(0));
  for (int num = 1; num < numCh; num++) {
    ASTNode child = (ASTNode) ast.getChild(num);
    switch (child.getToken().getType()) {
      case HiveParser.TOK_IFNOTEXISTS:
        try {
          List<String> dbs = db.getDatabasesByPattern(databaseName);
          if (dbs != null && dbs.size() > 0) {
            // db exists
            return ast;
          }
        } catch (HiveException e) {
          throw new SemanticException(e);
        }
        break;
    }
  }
  return ast;
}
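A hook like this only runs when a semantic-analyzer hook is registered with the session. The sketch below shows the standard wiring knob, hive.semantic.analyzer.hook; the HCatalog analyzer class name given is my recollection of the entry point that delegates CREATE DATABASE handling to CreateDatabaseHook, so treat it as an assumption.
import org.apache.hadoop.hive.conf.HiveConf;

class HookWiringSketch {
  // Registers a semantic-analyzer hook chain on the session configuration;
  // the hook class name is assumed, not taken from the snippet above.
  static HiveConf withHCatHooks(HiveConf conf) {
    conf.set("hive.semantic.analyzer.hook",
        "org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer");
    return conf;
  }
}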
use of org.apache.hadoop.hive.ql.parse.SemanticException in project hive by apache.
the class HiveAuthorizationTaskFactoryImpl method analyzePrivilegeListDef.
private List<PrivilegeDesc> analyzePrivilegeListDef(ASTNode node) throws SemanticException {
  List<PrivilegeDesc> ret = new ArrayList<PrivilegeDesc>();
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode privilegeDef = (ASTNode) node.getChild(i);
    ASTNode privilegeType = (ASTNode) privilegeDef.getChild(0);
    Privilege privObj = PrivilegeRegistry.getPrivilege(privilegeType.getType());
    if (privObj == null) {
      throw new SemanticException("Undefined privilege " + PrivilegeType.getPrivTypeByToken(privilegeType.getType()));
    }
    List<String> cols = null;
    if (privilegeDef.getChildCount() > 1) {
      cols = BaseSemanticAnalyzer.getColumnNames((ASTNode) privilegeDef.getChild(1));
    }
    PrivilegeDesc privilegeDesc = new PrivilegeDesc(privObj, cols);
    ret.add(privilegeDesc);
  }
  return ret;
}
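For orientation, this is roughly the list the method would produce for a grant along the lines of GRANT SELECT (id, name), INSERT ON TABLE t TO USER u, built by hand instead of from the AST. The Privilege constants and import paths are written from memory, so treat them as assumptions.
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;

class PrivilegeListSketch {
  // Hand-built equivalent of the analyzer output for two privilege definitions:
  // SELECT restricted to two columns, and INSERT with no column list.
  static List<PrivilegeDesc> example() {
    PrivilegeDesc selectOnCols = new PrivilegeDesc(Privilege.SELECT, Arrays.asList("id", "name"));
    PrivilegeDesc insertAllCols = new PrivilegeDesc(Privilege.INSERT, null);
    return Arrays.asList(selectOnCols, insertAllCols);
  }
}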