Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache: class GenericUDTFGetSplits, method convertTypeString.
private TypeDesc convertTypeString(String typeString) throws HiveException {
  TypeDesc typeDesc;
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
  Preconditions.checkState(typeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE,
      "Unsupported non-primitive type " + typeString);
  switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
    case BOOLEAN:
      typeDesc = new TypeDesc(TypeDesc.Type.BOOLEAN);
      break;
    case BYTE:
      typeDesc = new TypeDesc(TypeDesc.Type.TINYINT);
      break;
    case SHORT:
      typeDesc = new TypeDesc(TypeDesc.Type.SMALLINT);
      break;
    case INT:
      typeDesc = new TypeDesc(TypeDesc.Type.INT);
      break;
    case LONG:
      typeDesc = new TypeDesc(TypeDesc.Type.BIGINT);
      break;
    case FLOAT:
      typeDesc = new TypeDesc(TypeDesc.Type.FLOAT);
      break;
    case DOUBLE:
      typeDesc = new TypeDesc(TypeDesc.Type.DOUBLE);
      break;
    case STRING:
      typeDesc = new TypeDesc(TypeDesc.Type.STRING);
      break;
    case CHAR:
      CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo;
      typeDesc = new TypeDesc(TypeDesc.Type.CHAR, charTypeInfo.getLength());
      break;
    case VARCHAR:
      VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo;
      typeDesc = new TypeDesc(TypeDesc.Type.VARCHAR, varcharTypeInfo.getLength());
      break;
    case DATE:
      typeDesc = new TypeDesc(TypeDesc.Type.DATE);
      break;
    case TIMESTAMP:
      typeDesc = new TypeDesc(TypeDesc.Type.TIMESTAMP);
      break;
    case BINARY:
      typeDesc = new TypeDesc(TypeDesc.Type.BINARY);
      break;
    case DECIMAL:
      DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
      typeDesc = new TypeDesc(TypeDesc.Type.DECIMAL,
          decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
      break;
    default:
      throw new HiveException("Unsupported type " + typeString);
  }
  return typeDesc;
}
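The method relies on Hive's serde2 type machinery: TypeInfoUtils parses the type string, and parameterized primitives (char, varchar, decimal) expose their length, precision, and scale on the concrete TypeInfo subclass that comes back. A minimal standalone sketch of that first parsing step (the class name TypeStringDemo and the literal "decimal(10,2)" are illustrative, not from the source):

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeStringDemo {
  public static void main(String[] args) {
    // Parse a Hive type string into a TypeInfo, as convertTypeString() does first.
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("decimal(10,2)");
    // Parameterized primitives carry their parameters on the concrete subclass.
    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
    System.out.println(typeInfo.getTypeName()
        + " precision=" + decimalTypeInfo.getPrecision()
        + " scale=" + decimalTypeInfo.getScale());
  }
}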
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache: class GenericUDTFGetSplits, method process.
@Override
public void process(Object[] arguments) throws HiveException {
  String query = stringOI.getPrimitiveJavaObject(arguments[0]);
  int num = intOI.get(arguments[1]);
  PlanFragment fragment = createPlanFragment(query, num);
  TezWork tezWork = fragment.work;
  Schema schema = fragment.schema;
  try {
    for (InputSplit s : getSplits(jc, num, tezWork, schema)) {
      Object[] os = new Object[1];
      bos.reset();
      s.write(dos);
      byte[] frozen = bos.toByteArray();
      os[0] = frozen;
      forward(os);
    }
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
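The loop serializes each InputSplit by resetting a shared buffer and letting the split write itself, which is the standard Hadoop Writable pattern: the UDTF's bos/dos fields are a ByteArrayOutputStream wrapped in a DataOutputStream. A minimal sketch of that pattern, assuming that stream layering; FileSplit appears here only as a convenient concrete Writable for illustration:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileSplit;

public class WritableToBytesDemo {
  // Reuse the same buffer across rows: reset, let the Writable serialize itself, copy out.
  public static byte[] toBytes(Writable w, ByteArrayOutputStream bos, DataOutputStream dos)
      throws IOException {
    bos.reset();
    w.write(dos);
    dos.flush();
    return bos.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    Writable split = new FileSplit(new Path("/tmp/data"), 0L, 1024L, new String[0]);
    System.out.println("serialized split bytes: " + toBytes(split, bos, dos).length);
  }
}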
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache: class GenericUDFEncode, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  String value = PrimitiveObjectInspectorUtils.getString(arguments[0].get(), stringOI);
  if (value == null) {
    return null;
  }
  ByteBuffer encoded;
  if (encoder != null) {
    try {
      encoded = encoder.encode(CharBuffer.wrap(value));
    } catch (CharacterCodingException e) {
      throw new HiveException(e);
    }
  } else {
    encoded = Charset.forName(
        PrimitiveObjectInspectorUtils.getString(arguments[1].get(), charsetOI)).encode(value);
  }
  result.setSize(encoded.limit());
  encoded.get(result.getBytes(), 0, encoded.limit());
  return result;
}
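evaluate() takes one of two JDK paths: a pre-built CharsetEncoder, which can report bad input via CharacterCodingException (hence the wrap in HiveException), or an ad-hoc Charset.forName(...).encode(...), which silently replaces unmappable characters instead of throwing. A small standalone sketch of the difference between those two calls (US-ASCII and the sample string are illustrative, not from the UDF):

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;

public class EncodeDemo {
  public static void main(String[] args) {
    String value = "héllo";

    // Strict path: the encoder reports errors instead of replacing characters.
    CharsetEncoder encoder = Charset.forName("US-ASCII").newEncoder()
        .onMalformedInput(CodingErrorAction.REPORT)
        .onUnmappableCharacter(CodingErrorAction.REPORT);
    try {
      encoder.encode(CharBuffer.wrap(value));
    } catch (CharacterCodingException e) {
      System.out.println("strict encoder rejected: " + e);
    }

    // Lenient path: unmappable characters are replaced, no exception is thrown.
    ByteBuffer lenient = Charset.forName("US-ASCII").encode(value);
    System.out.println("lenient encoded length: " + lenient.limit());
  }
}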
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache: class BasePartitionEvaluator, method getRowBoundaryEnd.
private static int getRowBoundaryEnd(BoundaryDef b, int currRow, PTFPartition p) throws HiveException {
  Direction d = b.getDirection();
  int amt = b.getAmt();
  switch (d) {
    case PRECEDING:
      if (amt == 0) {
        return currRow + 1;
      }
      return currRow - amt + 1;
    case CURRENT:
      return currRow + 1;
    case FOLLOWING:
      if (amt == BoundarySpec.UNBOUNDED_AMOUNT) {
        return p.size();
      } else {
        return currRow + amt + 1;
      }
  }
  throw new HiveException("Unknown End Boundary Direction: " + d);
}
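The returned value is an exclusive end index into the partition: with currRow = 5, an end boundary of 2 PRECEDING yields 5 - 2 + 1 = 4, CURRENT ROW yields 6, and 3 FOLLOWING yields 9. A standalone sketch of the same arithmetic, in which the local Direction enum and the Integer.MAX_VALUE sentinel stand in for Hive's BoundaryDef and BoundarySpec.UNBOUNDED_AMOUNT:

public class RowBoundaryEndDemo {
  enum Direction { PRECEDING, CURRENT, FOLLOWING }

  // Exclusive end index of a window frame whose end boundary is (d, amt), evaluated at currRow.
  static int end(Direction d, int amt, int currRow, int partitionSize) {
    switch (d) {
      case PRECEDING:
        return amt == 0 ? currRow + 1 : currRow - amt + 1;
      case CURRENT:
        return currRow + 1;
      case FOLLOWING:
        return amt == Integer.MAX_VALUE ? partitionSize : currRow + amt + 1;
      default:
        throw new IllegalArgumentException("Unknown End Boundary Direction: " + d);
    }
  }

  public static void main(String[] args) {
    System.out.println(end(Direction.PRECEDING, 2, 5, 100)); // 4
    System.out.println(end(Direction.CURRENT, 0, 5, 100));   // 6
    System.out.println(end(Direction.FOLLOWING, 3, 5, 100)); // 9
  }
}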
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache: class TestVectorSerDeRow, method serializeRow.
private Output serializeRow(Object[] row, VectorRandomRowSource source,
    SerializeWrite serializeWrite) throws HiveException, IOException {
  Output output = new Output();
  serializeWrite.set(output);
  PrimitiveTypeInfo[] primitiveTypeInfos = source.primitiveTypeInfos();
  for (int i = 0; i < primitiveTypeInfos.length; i++) {
    Object object = row[i];
    PrimitiveCategory primitiveCategory = primitiveTypeInfos[i].getPrimitiveCategory();
    switch (primitiveCategory) {
      case BOOLEAN:
        {
          BooleanWritable expectedWritable = (BooleanWritable) object;
          boolean value = expectedWritable.get();
          serializeWrite.writeBoolean(value);
        }
        break;
      case BYTE:
        {
          ByteWritable expectedWritable = (ByteWritable) object;
          byte value = expectedWritable.get();
          serializeWrite.writeByte(value);
        }
        break;
      case SHORT:
        {
          ShortWritable expectedWritable = (ShortWritable) object;
          short value = expectedWritable.get();
          serializeWrite.writeShort(value);
        }
        break;
      case INT:
        {
          IntWritable expectedWritable = (IntWritable) object;
          int value = expectedWritable.get();
          serializeWrite.writeInt(value);
        }
        break;
      case LONG:
        {
          LongWritable expectedWritable = (LongWritable) object;
          long value = expectedWritable.get();
          serializeWrite.writeLong(value);
        }
        break;
      case DATE:
        {
          DateWritable expectedWritable = (DateWritable) object;
          Date value = expectedWritable.get();
          serializeWrite.writeDate(value);
        }
        break;
      case FLOAT:
        {
          FloatWritable expectedWritable = (FloatWritable) object;
          float value = expectedWritable.get();
          serializeWrite.writeFloat(value);
        }
        break;
      case DOUBLE:
        {
          DoubleWritable expectedWritable = (DoubleWritable) object;
          double value = expectedWritable.get();
          serializeWrite.writeDouble(value);
        }
        break;
      case STRING:
        {
          Text text = (Text) object;
          serializeWrite.writeString(text.getBytes(), 0, text.getLength());
        }
        break;
      case CHAR:
        {
          HiveCharWritable expectedWritable = (HiveCharWritable) object;
          HiveChar value = expectedWritable.getHiveChar();
          serializeWrite.writeHiveChar(value);
        }
        break;
      case VARCHAR:
        {
          HiveVarcharWritable expectedWritable = (HiveVarcharWritable) object;
          HiveVarchar value = expectedWritable.getHiveVarchar();
          serializeWrite.writeHiveVarchar(value);
        }
        break;
      case BINARY:
        {
          BytesWritable expectedWritable = (BytesWritable) object;
          byte[] bytes = expectedWritable.getBytes();
          int length = expectedWritable.getLength();
          serializeWrite.writeBinary(bytes, 0, length);
        }
        break;
      case TIMESTAMP:
        {
          TimestampWritable expectedWritable = (TimestampWritable) object;
          Timestamp value = expectedWritable.getTimestamp();
          serializeWrite.writeTimestamp(value);
        }
        break;
      case INTERVAL_YEAR_MONTH:
        {
          HiveIntervalYearMonthWritable expectedWritable = (HiveIntervalYearMonthWritable) object;
          HiveIntervalYearMonth value = expectedWritable.getHiveIntervalYearMonth();
          serializeWrite.writeHiveIntervalYearMonth(value);
        }
        break;
      case INTERVAL_DAY_TIME:
        {
          HiveIntervalDayTimeWritable expectedWritable = (HiveIntervalDayTimeWritable) object;
          HiveIntervalDayTime value = expectedWritable.getHiveIntervalDayTime();
          serializeWrite.writeHiveIntervalDayTime(value);
        }
        break;
      case DECIMAL:
        {
          HiveDecimalWritable expectedWritable = (HiveDecimalWritable) object;
          HiveDecimal value = expectedWritable.getHiveDecimal();
          serializeWrite.writeHiveDecimal(value, ((DecimalTypeInfo) primitiveTypeInfos[i]).scale());
        }
        break;
      default:
        throw new HiveException("Unexpected primitive category " + primitiveCategory);
    }
  }
  return output;
}
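One detail the STRING and BINARY cases depend on: getBytes() on Text and BytesWritable returns the writable's backing buffer, which can be longer than the logical value, so the serializer is given (bytes, 0, getLength()) rather than the whole array. A small sketch of that behavior (the sample values are illustrative):

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;

public class WritableBufferDemo {
  public static void main(String[] args) {
    Text text = new Text("hello world");
    byte[] hi = "hi".getBytes(StandardCharsets.UTF_8);
    text.set(hi, 0, hi.length);  // reuses the larger existing buffer
    System.out.println("Text logical=" + text.getLength()
        + " buffer=" + text.getBytes().length);

    BytesWritable bytes = new BytesWritable(new byte[] { 1, 2, 3, 4 });
    bytes.setSize(2);  // logical size shrinks, capacity does not
    System.out.println("BytesWritable logical=" + bytes.getLength()
        + " buffer=" + bytes.getBytes().length);
  }
}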