use of org.apache.hadoop.hive.serde2.io.DateWritable in project hive by apache.
the class GenericUDFDateAdd method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
if (arguments.length != 2) {
throw new UDFArgumentLengthException("date_add() requires 2 arguments, got " + arguments.length);
}
if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[0].getTypeName() + " is passed as first argument");
}
if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + arguments[1].getTypeName() + " is passed as second argument");
}
inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
switch(inputType1) {
case STRING:
case VARCHAR:
case CHAR:
inputType1 = PrimitiveCategory.STRING;
dateConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
break;
case TIMESTAMP:
dateConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
break;
case DATE:
dateConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
break;
default:
throw new UDFArgumentException(" DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
}
inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
switch(inputType2) {
case BYTE:
daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableByteObjectInspector);
break;
case SHORT:
daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableShortObjectInspector);
break;
case INT:
daysConverter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[1], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
break;
default:
throw new UDFArgumentException(" DATE_ADD() only takes TINYINT/SMALLINT/INT types as second argument, got " + inputType2);
}
return outputOI;
}
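For illustration, a minimal sketch of driving this initialize method (hypothetical setup, not part of the Hive source; it uses only the PrimitiveObjectInspectorFactory fields already referenced above):
// Hypothetical driver: a STRING date argument plus an INT day count
// should resolve to the writable DATE output inspector.
GenericUDFDateAdd udf = new GenericUDFDateAdd();
ObjectInspector[] argOIs = new ObjectInspector[] {
    PrimitiveObjectInspectorFactory.writableStringObjectInspector,
    PrimitiveObjectInspectorFactory.writableIntObjectInspector };
ObjectInspector resultOI = udf.initialize(argOIs); // throws UDFArgumentException for bad arguments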
use of org.apache.hadoop.hive.serde2.io.DateWritable in project hive by apache.
the class GenericUDFDateAdd method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
if (arguments[0].get() == null) {
return null;
}
Object daysWritableObject = daysConverter.convert(arguments[1].get());
if (daysWritableObject == null) {
return null;
}
int toBeAdded;
if (daysWritableObject instanceof ByteWritable) {
toBeAdded = ((ByteWritable) daysWritableObject).get();
} else if (daysWritableObject instanceof ShortWritable) {
toBeAdded = ((ShortWritable) daysWritableObject).get();
} else if (daysWritableObject instanceof IntWritable) {
toBeAdded = ((IntWritable) daysWritableObject).get();
} else {
return null;
}
// Convert the first param into a DateWritable value
switch(inputType1) {
case STRING:
String dateString = dateConverter.convert(arguments[0].get()).toString();
if (dateParser.parseDate(dateString, dateVal)) {
output.set(dateVal);
} else {
return null;
}
break;
case TIMESTAMP:
Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get())).getTimestamp();
output.set(DateWritable.millisToDays(ts.getTime()));
break;
case DATE:
DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get());
output.set(dw.getDays());
break;
default:
throw new UDFArgumentException("DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
}
int newDays = output.getDays() + (signModifier * toBeAdded);
output.set(newDays);
return output;
}
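Continuing the initialize sketch shown earlier, evaluate can be exercised with deferred writable arguments (the values are illustrative; Text and IntWritable are the Hadoop writables):
// Hypothetical call: add 2 days to 2009-07-30; evaluate returns the reused DateWritable.
DeferredObject[] row = new DeferredObject[] {
    new GenericUDF.DeferredJavaObject(new Text("2009-07-30")),
    new GenericUDF.DeferredJavaObject(new IntWritable(2)) };
DateWritable result = (DateWritable) udf.evaluate(row); // day value corresponding to 2009-08-01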
use of org.apache.hadoop.hive.serde2.io.DateWritable in project hive by apache.
the class GenericUDFDateDiff method convertToDate.
private Date convertToDate(PrimitiveCategory inputType, Converter converter, DeferredObject argument) throws HiveException {
assert (converter != null);
assert (argument != null);
if (argument.get() == null) {
return null;
}
Date date = new Date(0);
switch(inputType) {
case STRING:
case VARCHAR:
case CHAR:
String dateString = converter.convert(argument.get()).toString();
try {
date.setTime(formatter.parse(dateString).getTime());
} catch (ParseException e) {
return null;
}
break;
case TIMESTAMP:
Timestamp ts = ((TimestampWritable) converter.convert(argument.get())).getTimestamp();
date.setTime(ts.getTime());
break;
case DATE:
DateWritable dw = (DateWritable) converter.convert(argument.get());
date = dw.get();
break;
default:
throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
}
return date;
}
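Once both arguments pass through this helper, the datediff result reduces to day arithmetic; a simplified sketch, assuming date1 and date2 are the two converted java.sql.Date values (the real UDF also handles nulls and time zones via its formatter):
// Illustrative only: whole-day difference between the converted dates.
long millisPerDay = 24L * 60 * 60 * 1000;
long diffDays = (date1.getTime() - date2.getTime()) / millisPerDay;
IntWritable result = new IntWritable((int) diffDays);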
use of org.apache.hadoop.hive.serde2.io.DateWritable in project hive by apache.
the class TestHiveAccumuloTypes method testBinaryTypes.
@Test
public void testBinaryTypes() throws Exception {
final String tableName = test.getMethodName(), user = "root", pass = "";
MockInstance mockInstance = new MockInstance(test.getMethodName());
Connector conn = mockInstance.getConnector(user, new PasswordToken(pass));
HiveAccumuloTableInputFormat inputformat = new HiveAccumuloTableInputFormat();
JobConf conf = new JobConf();
conf.set(AccumuloSerDeParameters.TABLE_NAME, tableName);
conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
conf.set(AccumuloSerDeParameters.USER_NAME, user);
conf.set(AccumuloSerDeParameters.USER_PASS, pass);
// not used for mock, but required by input format
conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, AccumuloHiveConstants.ROWID + ",cf:string,cf:boolean,cf:tinyint,cf:smallint,cf:int,cf:bigint" + ",cf:float,cf:double,cf:decimal,cf:date,cf:timestamp,cf:char,cf:varchar");
conf.set(serdeConstants.LIST_COLUMNS, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
conf.set(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, "binary");
conn.tableOperations().create(tableName);
BatchWriterConfig writerConf = new BatchWriterConfig();
BatchWriter writer = conn.createBatchWriter(tableName, writerConf);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream out = new DataOutputStream(baos);
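// 'out' wraps 'baos', so the Writable.write(out) calls below land in baos and are captured by baos.toByteArray()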
String cf = "cf";
byte[] cfBytes = cf.getBytes();
Mutation m = new Mutation("row1");
// string
String stringValue = "string";
JavaStringObjectInspector stringOI = (JavaStringObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
LazyUtils.writePrimitiveUTF8(baos, stringOI.create(stringValue), stringOI, false, (byte) 0, null);
m.put(cfBytes, "string".getBytes(), baos.toByteArray());
// boolean
boolean booleanValue = true;
baos.reset();
JavaBooleanObjectInspector booleanOI = (JavaBooleanObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
LazyUtils.writePrimitive(baos, booleanOI.create(booleanValue), booleanOI);
m.put(cfBytes, "boolean".getBytes(), baos.toByteArray());
// tinyint
byte tinyintValue = -127;
baos.reset();
JavaByteObjectInspector byteOI = (JavaByteObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, tinyintValue, byteOI);
m.put(cfBytes, "tinyint".getBytes(), baos.toByteArray());
// smallint
short smallintValue = Short.MAX_VALUE;
baos.reset();
JavaShortObjectInspector shortOI = (JavaShortObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, smallintValue, shortOI);
m.put(cfBytes, "smallint".getBytes(), baos.toByteArray());
// int
int intValue = Integer.MAX_VALUE;
baos.reset();
JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
LazyUtils.writePrimitive(baos, intValue, intOI);
m.put(cfBytes, "int".getBytes(), baos.toByteArray());
// bigint
long bigintValue = Long.MAX_VALUE;
baos.reset();
JavaLongObjectInspector longOI = (JavaLongObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, bigintValue, longOI);
m.put(cfBytes, "bigint".getBytes(), baos.toByteArray());
// float
float floatValue = Float.MAX_VALUE;
baos.reset();
JavaFloatObjectInspector floatOI = (JavaFloatObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
LazyUtils.writePrimitive(baos, floatValue, floatOI);
m.put(cfBytes, "float".getBytes(), baos.toByteArray());
// double
double doubleValue = Double.MAX_VALUE;
baos.reset();
JavaDoubleObjectInspector doubleOI = (JavaDoubleObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
LazyUtils.writePrimitive(baos, doubleValue, doubleOI);
m.put(cfBytes, "double".getBytes(), baos.toByteArray());
// decimal
baos.reset();
HiveDecimal decimalValue = HiveDecimal.create(65536L);
HiveDecimalWritable decimalWritable = new HiveDecimalWritable(decimalValue);
decimalWritable.write(out);
m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
// date
baos.reset();
Date now = new Date(System.currentTimeMillis());
DateWritable dateWritable = new DateWritable(now);
Date dateValue = dateWritable.get();
dateWritable.write(out);
m.put(cfBytes, "date".getBytes(), baos.toByteArray());
// timestamp
baos.reset();
Timestamp timestampValue = new Timestamp(now.getTime());
ByteStream.Output output = new ByteStream.Output();
TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(now.getTime()));
timestampWritable.write(new DataOutputStream(output));
output.close();
m.put(cfBytes, "timestamp".getBytes(), output.toByteArray());
// char
baos.reset();
HiveChar charValue = new HiveChar("char", 4);
JavaHiveCharObjectInspector charOI = (JavaHiveCharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new CharTypeInfo(4));
LazyUtils.writePrimitiveUTF8(baos, charOI.create(charValue), charOI, false, (byte) 0, null);
m.put(cfBytes, "char".getBytes(), baos.toByteArray());
baos.reset();
HiveVarchar varcharValue = new HiveVarchar("varchar", 7);
JavaHiveVarcharObjectInspector varcharOI = (JavaHiveVarcharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new VarcharTypeInfo(7));
LazyUtils.writePrimitiveUTF8(baos, varcharOI.create(varcharValue), varcharOI, false, (byte) 0, null);
m.put(cfBytes, "varchar".getBytes(), baos.toByteArray());
writer.addMutation(m);
writer.close();
for (Entry<Key, Value> e : conn.createScanner(tableName, new Authorizations())) {
System.out.println(e);
}
// Create the RecordReader
FileInputFormat.addInputPath(conf, new Path("unused"));
InputSplit[] splits = inputformat.getSplits(conf, 0);
assertEquals(splits.length, 1);
RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
Text key = reader.createKey();
AccumuloHiveRow value = reader.createValue();
reader.next(key, value);
Assert.assertEquals(13, value.getTuples().size());
ByteArrayRef byteRef = new ByteArrayRef();
// string
Text cfText = new Text(cf), cqHolder = new Text();
cqHolder.set("string");
byte[] valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyStringObjectInspector lazyStringOI = LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(false, (byte) 0);
LazyString lazyString = (LazyString) LazyFactory.createLazyObject(lazyStringOI);
lazyString.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(stringValue, lazyString.getWritableObject().toString());
// boolean
cqHolder.set("boolean");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyBooleanObjectInspector lazyBooleanOI = (LazyBooleanObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
LazyBoolean lazyBoolean = (LazyBoolean) LazyFactory.createLazyPrimitiveBinaryClass(lazyBooleanOI);
lazyBoolean.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(booleanValue, lazyBoolean.getWritableObject().get());
// tinyint
cqHolder.set("tinyint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyByteObjectInspector lazyByteOI = (LazyByteObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
LazyByte lazyByte = (LazyByte) LazyFactory.createLazyPrimitiveBinaryClass(lazyByteOI);
lazyByte.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(tinyintValue, lazyByte.getWritableObject().get());
// smallint
cqHolder.set("smallint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyShortObjectInspector lazyShortOI = (LazyShortObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
LazyShort lazyShort = (LazyShort) LazyFactory.createLazyPrimitiveBinaryClass(lazyShortOI);
lazyShort.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(smallintValue, lazyShort.getWritableObject().get());
// int
cqHolder.set("int");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
lazyInt.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(intValue, lazyInt.getWritableObject().get());
// bigint
cqHolder.set("bigint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyLongObjectInspector lazyLongOI = (LazyLongObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
LazyLong lazyLong = (LazyLong) LazyFactory.createLazyPrimitiveBinaryClass(lazyLongOI);
lazyLong.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(bigintValue, lazyLong.getWritableObject().get());
// float
cqHolder.set("float");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyFloatObjectInspector lazyFloatOI = (LazyFloatObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
LazyFloat lazyFloat = (LazyFloat) LazyFactory.createLazyPrimitiveBinaryClass(lazyFloatOI);
lazyFloat.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(floatValue, lazyFloat.getWritableObject().get(), 0);
// double
cqHolder.set("double");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyDoubleObjectInspector lazyDoubleOI = (LazyDoubleObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
LazyDouble lazyDouble = (LazyDouble) LazyFactory.createLazyPrimitiveBinaryClass(lazyDoubleOI);
lazyDouble.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(doubleValue, lazyDouble.getWritableObject().get(), 0);
// decimal
cqHolder.set("decimal");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
ByteArrayInputStream bais = new ByteArrayInputStream(valueBytes);
DataInputStream in = new DataInputStream(bais);
decimalWritable.readFields(in);
Assert.assertEquals(decimalValue, decimalWritable.getHiveDecimal());
// date
cqHolder.set("date");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
bais = new ByteArrayInputStream(valueBytes);
in = new DataInputStream(bais);
dateWritable.readFields(in);
Assert.assertEquals(dateValue, dateWritable.get());
// timestamp
cqHolder.set("timestamp");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
bais = new ByteArrayInputStream(valueBytes);
in = new DataInputStream(bais);
timestampWritable.readFields(in);
Assert.assertEquals(timestampValue, timestampWritable.getTimestamp());
// char
cqHolder.set("char");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyHiveCharObjectInspector lazyCharOI = (LazyHiveCharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new CharTypeInfo(4));
LazyHiveChar lazyChar = (LazyHiveChar) LazyFactory.createLazyObject(lazyCharOI);
lazyChar.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(charValue, lazyChar.getWritableObject().getHiveChar());
// varchar
cqHolder.set("varchar");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyHiveVarcharObjectInspector lazyVarcharOI = (LazyHiveVarcharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new VarcharTypeInfo(7));
LazyHiveVarchar lazyVarchar = (LazyHiveVarchar) LazyFactory.createLazyObject(lazyVarcharOI);
lazyVarchar.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(varcharValue.toString(), lazyVarchar.getWritableObject().getHiveVarchar().toString());
}
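The DateWritable round trip buried in the test above can be isolated into a short standalone sketch (simplified illustration, not taken from the Hive test suite):
// Serialize a DateWritable and read it back, mirroring the write(out)/readFields(in)
// pair used for the "date" column in the test.
ByteArrayOutputStream dateBaos = new ByteArrayOutputStream();
DateWritable written = new DateWritable(new java.sql.Date(System.currentTimeMillis()));
written.write(new DataOutputStream(dateBaos));
DateWritable readBack = new DateWritable();
readBack.readFields(new DataInputStream(new ByteArrayInputStream(dateBaos.toByteArray())));
Assert.assertEquals(written.getDays(), readBack.getDays());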
use of org.apache.hadoop.hive.serde2.io.DateWritable in project hive by apache.
the class VectorizedBatchUtil method setVector.
private static void setVector(Object row, StructObjectInspector oi, StructField field, VectorizedRowBatch batch, DataOutputBuffer buffer, int rowIndex, int colIndex, int offset) throws HiveException {
Object fieldData = oi.getStructFieldData(row, field);
ObjectInspector foi = field.getFieldObjectInspector();
// Vectorization only supports PRIMITIVE data types. Assert the same
assert (foi.getCategory() == Category.PRIMITIVE);
// Get writable object
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) foi;
Object writableCol = poi.getPrimitiveWritableObject(fieldData);
// NOTE: the default value for null fields in vectorization is 1 for long columns, NaN for
// float/double. String types have no default value for null.
switch(poi.getPrimitiveCategory()) {
case BOOLEAN:
{
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
lcv.vector[rowIndex] = ((BooleanWritable) writableCol).get() ? 1 : 0;
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
setNullColIsNullValue(lcv, rowIndex);
}
}
break;
case BYTE:
{
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
lcv.vector[rowIndex] = ((ByteWritable) writableCol).get();
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
setNullColIsNullValue(lcv, rowIndex);
}
}
break;
case SHORT:
{
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
lcv.vector[rowIndex] = ((ShortWritable) writableCol).get();
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
setNullColIsNullValue(lcv, rowIndex);
}
}
break;
case INT:
{
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
lcv.vector[rowIndex] = ((IntWritable) writableCol).get();
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
setNullColIsNullValue(lcv, rowIndex);
}
}
break;
case LONG:
{
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
lcv.vector[rowIndex] = ((LongWritable) writableCol).get();
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
setNullColIsNullValue(lcv, rowIndex);
}
}
break;
case DATE:
{
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
lcv.vector[rowIndex] = ((DateWritable) writableCol).getDays();
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
setNullColIsNullValue(lcv, rowIndex);
}
}
break;
case FLOAT:
{
DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
dcv.vector[rowIndex] = ((FloatWritable) writableCol).get();
dcv.isNull[rowIndex] = false;
} else {
dcv.vector[rowIndex] = Double.NaN;
setNullColIsNullValue(dcv, rowIndex);
}
}
break;
case DOUBLE:
{
DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
dcv.vector[rowIndex] = ((DoubleWritable) writableCol).get();
dcv.isNull[rowIndex] = false;
} else {
dcv.vector[rowIndex] = Double.NaN;
setNullColIsNullValue(dcv, rowIndex);
}
}
break;
case TIMESTAMP:
{
TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
lcv.set(rowIndex, ((TimestampWritable) writableCol).getTimestamp());
lcv.isNull[rowIndex] = false;
} else {
lcv.setNullValue(rowIndex);
setNullColIsNullValue(lcv, rowIndex);
}
}
break;
case INTERVAL_YEAR_MONTH:
{
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
HiveIntervalYearMonth i = ((HiveIntervalYearMonthWritable) writableCol).getHiveIntervalYearMonth();
lcv.vector[rowIndex] = i.getTotalMonths();
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
setNullColIsNullValue(lcv, rowIndex);
}
}
break;
case INTERVAL_DAY_TIME:
{
IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
HiveIntervalDayTime idt = ((HiveIntervalDayTimeWritable) writableCol).getHiveIntervalDayTime();
icv.set(rowIndex, idt);
icv.isNull[rowIndex] = false;
} else {
icv.setNullValue(rowIndex);
setNullColIsNullValue(icv, rowIndex);
}
}
break;
case BINARY:
{
BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
bcv.isNull[rowIndex] = false;
BytesWritable bw = (BytesWritable) writableCol;
byte[] bytes = bw.getBytes();
int start = buffer.getLength();
int length = bw.getLength();
try {
buffer.write(bytes, 0, length);
} catch (IOException ioe) {
throw new IllegalStateException("bad write", ioe);
}
bcv.setRef(rowIndex, buffer.getData(), start, length);
} else {
setNullColIsNullValue(bcv, rowIndex);
}
}
break;
case STRING:
{
BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
bcv.isNull[rowIndex] = false;
Text colText = (Text) writableCol;
int start = buffer.getLength();
int length = colText.getLength();
try {
buffer.write(colText.getBytes(), 0, length);
} catch (IOException ioe) {
throw new IllegalStateException("bad write", ioe);
}
bcv.setRef(rowIndex, buffer.getData(), start, length);
} else {
setNullColIsNullValue(bcv, rowIndex);
}
}
break;
case CHAR:
{
BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
bcv.isNull[rowIndex] = false;
HiveChar colHiveChar = ((HiveCharWritable) writableCol).getHiveChar();
byte[] bytes = colHiveChar.getStrippedValue().getBytes();
// We assume the CHAR maximum length was enforced when the object was created.
int length = bytes.length;
int start = buffer.getLength();
try {
// In vector mode, we store CHAR as unpadded.
buffer.write(bytes, 0, length);
} catch (IOException ioe) {
throw new IllegalStateException("bad write", ioe);
}
bcv.setRef(rowIndex, buffer.getData(), start, length);
} else {
setNullColIsNullValue(bcv, rowIndex);
}
}
break;
case VARCHAR:
{
BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
bcv.isNull[rowIndex] = false;
HiveVarchar colHiveVarchar = ((HiveVarcharWritable) writableCol).getHiveVarchar();
byte[] bytes = colHiveVarchar.getValue().getBytes();
// We assume the VARCHAR maximum length was enforced when the object was created.
int length = bytes.length;
int start = buffer.getLength();
try {
buffer.write(bytes, 0, length);
} catch (IOException ioe) {
throw new IllegalStateException("bad write", ioe);
}
bcv.setRef(rowIndex, buffer.getData(), start, length);
} else {
setNullColIsNullValue(bcv, rowIndex);
}
}
break;
case DECIMAL:
DecimalColumnVector dcv = (DecimalColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
dcv.isNull[rowIndex] = false;
HiveDecimalWritable wobj = (HiveDecimalWritable) writableCol;
dcv.set(rowIndex, wobj);
} else {
setNullColIsNullValue(dcv, rowIndex);
}
break;
default:
throw new HiveException("Vectorizaton is not supported for datatype:" + poi.getPrimitiveCategory());
}
}
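For completeness, the reverse mapping of the DATE branch above (recovering a DateWritable from the vectorized epoch-day long) is a one-liner; this sketch is illustrative and not part of VectorizedBatchUtil:
// Recover the writable date from the long stored in the LongColumnVector.
LongColumnVector dateCol = (LongColumnVector) batch.cols[offset + colIndex];
DateWritable restored = new DateWritable((int) dateCol.vector[rowIndex]);
java.sql.Date asDate = restored.get(); // java.sql.Date for that epoch day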