Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project hive by apache.
The class VerifyFastRow, method verifyDeserializeRead: reads the next field from a DeserializeRead and checks both its null flag and its value against the expected Writable.
public static void verifyDeserializeRead(DeserializeRead deserializeRead,
    PrimitiveTypeInfo primitiveTypeInfo, Writable writable) throws IOException {
  // Read the next field; its null flag must agree with the expected Writable.
  boolean isNull = !deserializeRead.readNextField();
  if (isNull) {
    if (writable != null) {
      TestCase.fail(deserializeRead.getClass().getName() +
          " field reports null but object is not null (class " +
          writable.getClass().getName() + ", " + writable.toString() + ")");
    }
    return;
  } else if (writable == null) {
    TestCase.fail("Field reports not null but object is null");
  }
  // Compare the deserialized value against the expected Writable, per primitive category.
  switch (primitiveTypeInfo.getPrimitiveCategory()) {
  case BOOLEAN: {
    boolean value = deserializeRead.currentBoolean;
    if (!(writable instanceof BooleanWritable)) {
      TestCase.fail("Boolean expected writable not Boolean");
    }
    boolean expected = ((BooleanWritable) writable).get();
    if (value != expected) {
      TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
    }
    break;
  }
  case BYTE: {
    byte value = deserializeRead.currentByte;
    if (!(writable instanceof ByteWritable)) {
      TestCase.fail("Byte expected writable not Byte");
    }
    byte expected = ((ByteWritable) writable).get();
    if (value != expected) {
      TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
    }
    break;
  }
  case SHORT: {
    short value = deserializeRead.currentShort;
    if (!(writable instanceof ShortWritable)) {
      TestCase.fail("Short expected writable not Short");
    }
    short expected = ((ShortWritable) writable).get();
    if (value != expected) {
      TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
    }
    break;
  }
  case INT: {
    int value = deserializeRead.currentInt;
    if (!(writable instanceof IntWritable)) {
      TestCase.fail("Integer expected writable not Integer");
    }
    int expected = ((IntWritable) writable).get();
    if (value != expected) {
      TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
    }
    break;
  }
  case LONG: {
    long value = deserializeRead.currentLong;
    if (!(writable instanceof LongWritable)) {
      TestCase.fail("Long expected writable not Long");
    }
    long expected = ((LongWritable) writable).get();
    if (value != expected) {
      TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
    }
    break;
  }
  case FLOAT: {
    float value = deserializeRead.currentFloat;
    if (!(writable instanceof FloatWritable)) {
      TestCase.fail("Float expected writable not Float");
    }
    float expected = ((FloatWritable) writable).get();
    if (value != expected) {
      TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
    }
    break;
  }
  case DOUBLE: {
    double value = deserializeRead.currentDouble;
    if (!(writable instanceof DoubleWritable)) {
      TestCase.fail("Double expected writable not Double");
    }
    double expected = ((DoubleWritable) writable).get();
    if (value != expected) {
      TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
    }
    break;
  }
  case STRING: {
    // Variable-length fields are exposed as a (bytes, start, length) window.
    byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes,
        deserializeRead.currentBytesStart,
        deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
    Text text = new Text(stringBytes);
    String string = text.toString();
    String expected = ((Text) writable).toString();
    if (!string.equals(expected)) {
      TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
    }
    break;
  }
  case CHAR: {
    byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes,
        deserializeRead.currentBytesStart,
        deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
    Text text = new Text(stringBytes);
    String string = text.toString();
    HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
    HiveChar expected = ((HiveCharWritable) writable).getHiveChar();
    if (!hiveChar.equals(expected)) {
      TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
    }
    break;
  }
  case VARCHAR: {
    byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes,
        deserializeRead.currentBytesStart,
        deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
    Text text = new Text(stringBytes);
    String string = text.toString();
    HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
    HiveVarchar expected = ((HiveVarcharWritable) writable).getHiveVarchar();
    if (!hiveVarchar.equals(expected)) {
      TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
    }
    break;
  }
  case DECIMAL: {
    HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
    if (value == null) {
      TestCase.fail("Decimal field evaluated to NULL");
    }
    HiveDecimal expected = ((HiveDecimalWritable) writable).getHiveDecimal();
    if (!value.equals(expected)) {
      DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
      int precision = decimalTypeInfo.getPrecision();
      int scale = decimalTypeInfo.getScale();
      TestCase.fail("Decimal field mismatch (expected " + expected.toString() +
          " found " + value.toString() + ") precision " + precision + ", scale " + scale);
    }
    break;
  }
  case DATE: {
    Date value = deserializeRead.currentDateWritable.get();
    Date expected = ((DateWritable) writable).get();
    if (!value.equals(expected)) {
      TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
    }
    break;
  }
  case TIMESTAMP: {
    Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
    Timestamp expected = ((TimestampWritable) writable).getTimestamp();
    if (!value.equals(expected)) {
      TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
    }
    break;
  }
  case INTERVAL_YEAR_MONTH: {
    HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
    HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) writable).getHiveIntervalYearMonth();
    if (!value.equals(expected)) {
      TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
    }
    break;
  }
  case INTERVAL_DAY_TIME: {
    HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
    HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) writable).getHiveIntervalDayTime();
    if (!value.equals(expected)) {
      TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
    }
    break;
  }
  case BINARY: {
    byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes,
        deserializeRead.currentBytesStart,
        deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
    BytesWritable bytesWritable = (BytesWritable) writable;
    byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
    if (byteArray.length != expected.length) {
      TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) +
          " found " + Arrays.toString(byteArray) + ")");
    }
    for (int b = 0; b < byteArray.length; b++) {
      if (byteArray[b] != expected[b]) {
        TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) +
            " found " + Arrays.toString(byteArray) + ")");
      }
    }
    break;
  }
  default:
    throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
  }
}
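For context, here is a minimal sketch of how a serde test might drive this verifier on one serialized row. The LazyBinaryDeserializeRead construction and the typeInfos, writables, and bytes variables are illustrative assumptions prepared by a hypothetical test, not code from VerifyFastRow:

// Hypothetical harness: deserialize one LazyBinary-encoded row and verify
// every primitive field against the expected Writable for that column.
DeserializeRead deserializeRead =
    new LazyBinaryDeserializeRead(typeInfos, /* useExternalBuffer */ false);
deserializeRead.set(bytes, 0, bytes.length);
for (int i = 0; i < typeInfos.length; i++) {
  VerifyFastRow.verifyDeserializeRead(
      deserializeRead, (PrimitiveTypeInfo) typeInfos[i], writables[i]);
}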
Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project hive by apache.
The class TestGenericUDFOPNegative, method testVarchar: applies unary minus to a varchar(12) input and expects a double result.
@Test
public void testVarchar() throws HiveException {
  GenericUDFOPNegative udf = new GenericUDFOPNegative();
  HiveVarchar vc = new HiveVarchar("32300.004747", 12);
  HiveVarcharWritable input = new HiveVarcharWritable(vc);
  VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // Negating a varchar input yields a double result.
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  Assert.assertEquals(-32300.004747, res.get(), 0.0);
}
Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project hive by apache.
The class TestGenericUDFFloor, method testVarchar: applies floor() to a varchar(12) input and expects a bigint (long) result.
@Test
public void testVarchar() throws HiveException {
  GenericUDFFloor udf = new GenericUDFFloor();
  HiveVarchar vc = new HiveVarchar("32300.004747", 12);
  HiveVarcharWritable input = new HiveVarcharWritable(vc);
  VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // floor() on a varchar input yields a bigint (long) result.
  Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
  LongWritable res = (LongWritable) udf.evaluate(args);
  Assert.assertEquals(32300L, res.get());
}
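Both UDF tests above follow the same initialize-then-evaluate pattern. A hedged sketch of that pattern factored into a helper; the name evaluateUnaryUdf and its surrounding class are assumptions for illustration, not part of the original tests:

// Hypothetical helper distilling the shared pattern: wrap the HiveVarchar in a
// writable, build the matching ObjectInspector, initialize the UDF to resolve
// its return type, then evaluate with a deferred argument.
private static Object evaluateUnaryUdf(GenericUDF udf, String text, int varcharLength)
    throws HiveException {
  HiveVarcharWritable input = new HiveVarcharWritable(new HiveVarchar(text, varcharLength));
  VarcharTypeInfo typeInfo = TypeInfoFactory.getVarcharTypeInfo(varcharLength);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo) };
  udf.initialize(inputOIs);
  DeferredObject[] args = { new DeferredJavaObject(input) };
  return udf.evaluate(args);
}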
Use of org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo in project presto by prestodb.
The class TestHiveFileFormats, method testFailForLongVarcharPartitionColumn: verifies that the four-character partition value 'test' is rejected for a varchar(3) partition key across all tested file formats.
@Test
public void testFailForLongVarcharPartitionColumn() throws Exception {
  TestColumn partitionColumn = new TestColumn("partition_column",
      getPrimitiveJavaObjectInspector(new VarcharTypeInfo(3)), "test", utf8Slice("tes"), true);
  TestColumn varcharColumn = new TestColumn("varchar_column",
      getPrimitiveJavaObjectInspector(new VarcharTypeInfo(3)), new HiveVarchar("tes", 3), utf8Slice("tes"));
  List<TestColumn> columns = ImmutableList.of(partitionColumn, varcharColumn);

  HiveErrorCode expectedErrorCode = HiveErrorCode.HIVE_INVALID_PARTITION_VALUE;
  String expectedMessage = "Invalid partition value 'test' for varchar(3) partition key: partition_column";

  assertThatFileFormat(RCTEXT)
      .withColumns(columns)
      .isFailingForRecordCursor(new ColumnarTextHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage)
      .isFailingForRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
  assertThatFileFormat(RCBINARY)
      .withColumns(columns)
      .isFailingForRecordCursor(new ColumnarBinaryHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage)
      .isFailingForRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
  assertThatFileFormat(ORC)
      .withColumns(columns)
      .isFailingForPageSource(new OrcPageSourceFactory(TYPE_MANAGER, false, HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
  assertThatFileFormat(PARQUET)
      .withColumns(columns)
      .withSession(parquetCursorSession)
      .isFailingForRecordCursor(new ParquetRecordCursorProvider(false, HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
  assertThatFileFormat(PARQUET)
      .withColumns(columns)
      .withSession(parquetCursorPushdownSession)
      .isFailingForRecordCursor(new ParquetRecordCursorProvider(false, HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
  assertThatFileFormat(PARQUET)
      .withColumns(columns)
      .withSession(parquetPageSourceSession)
      .isFailingForPageSource(new ParquetPageSourceFactory(TYPE_MANAGER, false, HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
  assertThatFileFormat(PARQUET)
      .withColumns(columns)
      .withSession(parquetPageSourcePushdown)
      .isFailingForPageSource(new ParquetPageSourceFactory(TYPE_MANAGER, false, HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
  assertThatFileFormat(SEQUENCEFILE)
      .withColumns(columns)
      .isFailingForRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
  assertThatFileFormat(TEXTFILE)
      .withColumns(columns)
      .isFailingForRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
}
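The failure comes down to the partition value being longer than the declared varchar length. A standalone illustration of that length rule using Hive's own varchar type, independent of the Presto test harness (the println lines are only for demonstration):

// "test" is 4 characters, so it cannot be stored losslessly in varchar(3).
// HiveVarchar truncates to the declared maximum length, which is why the
// connector must reject the partition value rather than silently truncate it.
HiveVarchar declared = new HiveVarchar("test", 3);
System.out.println(declared.getValue());   // "tes" -- truncated to length 3
System.out.println("test".length() > 3);   // true -> invalid partition value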