use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
the class PTFTranslator method translate.
private PTFExpressionDef translate(ShapeDetails inpShape, PartitionExpression pExpr) throws SemanticException {
  PTFExpressionDef expDef = null;
  try {
    expDef = buildExpressionDef(inpShape, pExpr.getExpression());
  } catch (HiveException he) {
    throw new SemanticException(he);
  }
  PTFTranslator.validateComparable(expDef.getOI(),
      String.format("Partition Expression %s is not a comparable expression", pExpr.getExpression().toStringTree()));
  return expDef;
}
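The idiom here is to confine the checked HiveException to the expression-building step and rethrow it as the SemanticException the analyzer API promises, keeping the original as the cause. Below is a minimal, self-contained sketch of the same wrap-and-rethrow pattern; BuildException, AnalysisException, and ExpressionTranslator are hypothetical stand-ins for the Hive classes, not Hive API.

// Hypothetical stand-ins for HiveException and SemanticException.
class BuildException extends Exception {
  BuildException(String msg) { super(msg); }
}

class AnalysisException extends Exception {
  AnalysisException(Throwable cause) { super(cause); }
}

class ExpressionTranslator {
  // Mirrors the translate() shape: callers see only AnalysisException.
  String translate(String expr) throws AnalysisException {
    String result;
    try {
      result = build(expr);
    } catch (BuildException be) {
      // Wrap rather than swallow, so the cause chain survives for diagnostics.
      throw new AnalysisException(be);
    }
    return result;
  }

  private String build(String expr) throws BuildException {
    if (expr == null || expr.isEmpty()) {
      throw new BuildException("empty expression");
    }
    return expr.trim();
  }
}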
use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
the class DDLSemanticAnalyzer method preparePartitions.
private List<Partition> preparePartitions(org.apache.hadoop.hive.ql.metadata.Table baseTbl, HashMap<String, String> partSpec, org.apache.hadoop.hive.ql.metadata.Table indexTbl, Hive db, List<Partition> indexTblPartitions) throws HiveException, MetaException {
  List<Partition> baseTblPartitions = new ArrayList<Partition>();
  if (partSpec != null) {
    // if a partition spec is specified, produce an index only for that partition
    Partition part = db.getPartition(baseTbl, partSpec, false);
    if (part == null) {
      throw new HiveException("Partition " + Warehouse.makePartName(partSpec, false) + " does not exist in table " + baseTbl.getTableName());
    }
    baseTblPartitions.add(part);
    Partition indexPart = db.getPartition(indexTbl, partSpec, false);
    if (indexPart == null) {
      indexPart = db.createPartition(indexTbl, partSpec);
    }
    indexTblPartitions.add(indexPart);
  } else if (baseTbl.isPartitioned()) {
    // if no partition is specified, create indexes for all partitions, one by one
    baseTblPartitions = db.getPartitions(baseTbl);
    for (Partition basePart : baseTblPartitions) {
      HashMap<String, String> pSpec = basePart.getSpec();
      Partition indexPart = db.getPartition(indexTbl, pSpec, false);
      if (indexPart == null) {
        indexPart = db.createPartition(indexTbl, pSpec);
      }
      indexTblPartitions.add(indexPart);
    }
  }
  return baseTblPartitions;
}
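Both branches follow the same get-or-create shape: look the partition up with a non-throwing getter, and create it only when the lookup returns null. A self-contained sketch of that idiom over a plain map; PartitionStore, get, create, and getOrCreate are illustrative names, not Hive's Metastore API.

import java.util.HashMap;
import java.util.Map;

class PartitionStore {
  private final Map<String, String> partitions = new HashMap<>();

  // Returns null when the partition does not exist, analogous to
  // db.getPartition(tbl, spec, false) above.
  String get(String spec) { return partitions.get(spec); }

  String create(String spec) {
    String part = "partition:" + spec;
    partitions.put(spec, part);
    return part;
  }

  // The get-or-create idiom applied to the index partitions above.
  String getOrCreate(String spec) {
    String part = get(spec);
    if (part == null) {
      part = create(spec);
    }
    return part;
  }
}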
use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
the class PTFDeserializer method initialize.
protected void initialize(ShapeDetails shp, StructObjectInspector OI) throws HiveException {
  String serdeClassName = shp.getSerdeClassName();
  Properties serDeProps = new Properties();
  Map<String, String> serdePropsMap = new LinkedHashMap<String, String>();
  addOIPropertiestoSerDePropsMap(OI, serdePropsMap);
  for (String serdeName : serdePropsMap.keySet()) {
    serDeProps.setProperty(serdeName, serdePropsMap.get(serdeName));
  }
  try {
    AbstractSerDe serDe = ReflectionUtils.newInstance(hConf.getClassByName(serdeClassName).asSubclass(AbstractSerDe.class), hConf);
    SerDeUtils.initializeSerDe(serDe, hConf, serDeProps, null);
    shp.setSerde(serDe);
    StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serDe, OI);
    shp.setOI(outOI);
  } catch (Exception se) {
    throw new HiveException(se);
  }
}
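The key move is loading the SerDe class by name and narrowing it with asSubclass before instantiating, so an unrelated class name fails fast instead of surfacing later, and then collapsing every reflective failure into the one exception type the caller handles. A pure-JDK sketch of that pattern; the Codec interface and class names are invented for illustration (Hive routes this through ReflectionUtils and the job configuration).

// Hypothetical plugin interface standing in for AbstractSerDe.
interface Codec {
  String encode(String s);
}

class UpperCodec implements Codec {
  public String encode(String s) { return s.toUpperCase(); }
}

class CodecFactory {
  // Load by name, narrow to the expected supertype, instantiate, and
  // collapse every reflective failure into one wrapper exception.
  static Codec newCodec(String className) {
    try {
      return Class.forName(className)
          .asSubclass(Codec.class)          // fails fast on unrelated types
          .getDeclaredConstructor()
          .newInstance();
    } catch (ReflectiveOperationException | ClassCastException e) {
      throw new IllegalStateException("cannot load codec " + className, e);
    }
  }
}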
use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
the class SparkPartitionPruningSinkOperator method process.
@Override
public void process(Object row, int tag) throws HiveException {
  ObjectInspector rowInspector = inputObjInspectors[0];
  try {
    Writable writableRow = serializer.serialize(row, rowInspector);
    writableRow.write(buffer);
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
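The operator serializes each row and appends the resulting bytes to an in-memory buffer, converting any serialization failure into HiveException. A JDK-only sketch of the same accumulate-and-wrap pattern; RowSinkException and the record layout are invented for the example, whereas the real code writes Hadoop Writables.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hypothetical wrapper standing in for HiveException.
class RowSinkException extends Exception {
  RowSinkException(Throwable cause) { super(cause); }
}

class RowSink {
  private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  private final DataOutputStream out = new DataOutputStream(buffer);

  // Serialize one row into the growing buffer; wrap I/O failures.
  void process(String row) throws RowSinkException {
    try {
      out.writeUTF(row); // length-prefixed encoding, much like a Writable
    } catch (IOException e) {
      throw new RowSinkException(e);
    }
  }

  byte[] drain() {
    return buffer.toByteArray();
  }
}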
use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
the class Driver method run.
public CommandProcessorResponse run(String command, boolean alreadyCompiled) throws CommandNeedRetryException {
  CommandProcessorResponse cpr = runInternal(command, alreadyCompiled);
  if (cpr.getResponseCode() == 0) {
    return cpr;
  }
  SessionState ss = SessionState.get();
  if (ss == null) {
    return cpr;
  }
  MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf());
  if (!(mdf instanceof JsonMetaDataFormatter)) {
    return cpr;
  }
  /* Here we want to encode the error in a machine-readable way (e.g. JSON).
   * Ideally, errorCode would always be set to a canonical error defined in ErrorMsg.
   * In practice that is rarely the case, so the messy logic below tries to tease
   * out a canonical error code if it can. Exclude the stack trace from the output
   * when the error is a specific/expected one.
   * It's written to stdout for backward compatibility (WebHCat consumes it). */
  try {
    if (downstreamError == null) {
      mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState);
      return cpr;
    }
    ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpr.getResponseCode());
    if (canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) {
      /* Some HiveExceptions (e.g. SemanticException) don't set a
       * canonical ErrorMsg explicitly, but there is logic
       * (e.g. #compile()) to find an appropriate canonical error and
       * return its code as the error code. In this case we want to
       * preserve it for downstream code to interpret. */
      mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState, null);
      return cpr;
    }
    if (downstreamError instanceof HiveException) {
      HiveException rc = (HiveException) downstreamError;
      mdf.error(ss.out, errorMessage, rc.getCanonicalErrorMsg().getErrorCode(), SQLState,
          rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ? org.apache.hadoop.util.StringUtils.stringifyException(rc) : null);
    } else {
      ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(downstreamError.getMessage());
      mdf.error(ss.out, errorMessage, canonicalMsg.getErrorCode(), SQLState,
          org.apache.hadoop.util.StringUtils.stringifyException(downstreamError));
    }
  } catch (HiveException ex) {
    console.printError("Unable to JSON-encode the error", org.apache.hadoop.util.StringUtils.stringifyException(ex));
  }
  return cpr;
}
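The branching boils down to a priority order: a canonical code already carried by the response, then a code carried by the HiveException itself, then whatever can be inferred from the downstream error's message, with the stack trace attached only when the error falls through to the generic bucket. A compact sketch of that resolution order; ErrorCode, fromMessage, and ErrorEncoder are illustrative names, not the Driver's API, and the string matching is deliberately simplistic.

// Illustrative stand-ins for ErrorMsg and its canonical codes.
enum ErrorCode {
  GENERIC(40000), PARSE(10001), AUTH(10002);

  final int code;
  ErrorCode(int c) { code = c; }

  // Like ErrorMsg.getErrorMsg(String): map a message to a known error,
  // falling back to GENERIC when nothing matches.
  static ErrorCode fromMessage(String msg) {
    if (msg != null && msg.contains("ParseException")) return PARSE;
    if (msg != null && msg.contains("Authorization")) return AUTH;
    return GENERIC;
  }
}

class ErrorEncoder {
  // The Driver's priority order, condensed: prefer a canonical code,
  // and include trace detail only for generic/unexpected errors.
  // (No JSON escaping here; this only illustrates the control flow.)
  static String encode(String message, Throwable downstream) {
    ErrorCode canonical = ErrorCode.fromMessage(
        downstream == null ? null : downstream.getMessage());
    StringBuilder json = new StringBuilder()
        .append("{\"error\":\"").append(message)
        .append("\",\"code\":").append(canonical.code);
    if (downstream != null && canonical == ErrorCode.GENERIC) {
      json.append(",\"trace\":\"")
          .append(downstream.getClass().getName()).append("\"");
    }
    return json.append("}").toString();
  }
}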