Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.
From the class TestVectorGenericDateExpressions, method testDate.
@Test
public void testDate() throws HiveException {
  for (PrimitiveCategory colType : dateTimestampStringTypes) {
    LongColumnVector date = newRandomLongColumnVector(10000, size);
    LongColumnVector output = new LongColumnVector(size);

    VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
    batch.cols[0] = castTo(date, colType);
    batch.cols[1] = output;
    validateDate(batch, colType, date);

    TestVectorizedRowBatch.addRandomNulls(date);
    batch.cols[0] = castTo(date, colType);
    validateDate(batch, colType, date);
  }

  VectorExpression udf = new VectorUDFDateString(0, 1);
  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo });
  udf.transientInit();

  VectorizedRowBatch batch = new VectorizedRowBatch(2, 1);
  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  BytesColumnVector bcv = (BytesColumnVector) batch.cols[0];
  byte[] bytes = "error".getBytes(utf8);
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[1].isNull[0]);
}
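All four date tests in this listing end with the same single-row check: a batch whose first column holds the UTF-8 bytes of the unparseable string "error" is fed to the expression, and the output column is then expected to be NULL. A hypothetical helper could factor out that repeated setup; the method name and the use of StandardCharsets.UTF_8 (the tests use a utf8 field not shown here) are assumptions for illustration, not part of the Hive test class:

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Hypothetical helper: builds a batch whose first column is a single-row
// string column holding the given value; every remaining column is allocated
// as a single-row long column, ready to receive the expression's output.
private static VectorizedRowBatch singleStringRowBatch(String value, int numCols) {
  VectorizedRowBatch batch = new VectorizedRowBatch(numCols, 1);
  BytesColumnVector bcv = new BytesColumnVector(1);
  byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  batch.cols[0] = bcv;
  for (int i = 1; i < numCols; i++) {
    batch.cols[i] = new LongColumnVector(1);
  }
  return batch;
}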
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.
From the class TestVectorGenericDateExpressions, method testDateSubColCol.
@Test
public void testDateSubColCol() throws HiveException {
  for (PrimitiveCategory colType1 : dateTimestampStringTypes) {
    testDateAddColCol(colType1, false);
  }

  VectorExpression udf = new VectorUDFDateSubColCol(0, 1, 2);
  VectorizedRowBatch batch = new VectorizedRowBatch(3, 1);
  BytesColumnVector bcv;
  byte[] bytes = "error".getBytes(utf8);

  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();

  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  batch.cols[2] = new LongColumnVector(1);
  bcv = (BytesColumnVector) batch.cols[0];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[2].isNull[0]);
}
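The loop above delegates to a shared testDateAddColCol(colType1, boolean) helper whose body is not shown in this listing; the false flag selects the date_sub direction, true (used in testDateAddColCol below) selects date_add. A minimal sketch of the semantics the helper presumably validates, assuming Hive's standard date_add/date_sub behavior rather than quoting the actual helper:

// Assumed semantics only: with dates stored as days since the epoch,
// date_add and date_sub reduce to element-wise long arithmetic.
static long expectedResult(long epochDays, long numDays, boolean isAdd) {
  return isAdd ? epochDays + numDays : epochDays - numDays;
}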
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.
From the class TestVectorGenericDateExpressions, method testDateDiffColCol.
@Test
public void testDateDiffColCol() throws HiveException {
  for (PrimitiveCategory colType1 : dateTimestampStringTypes) {
    for (PrimitiveCategory colType2 : dateTimestampStringTypes) {
      LongColumnVector date1 = newRandomLongColumnVector(10000, size);
      LongColumnVector date2 = newRandomLongColumnVector(10000, size);
      LongColumnVector output = new LongColumnVector(size);

      VectorizedRowBatch batch = new VectorizedRowBatch(3, size);
      batch.cols[0] = castTo(date1, colType1);
      batch.cols[1] = castTo(date2, colType2);
      batch.cols[2] = output;
      validateDateDiff(batch, date1, date2, colType1, colType2);

      TestVectorizedRowBatch.addRandomNulls(date1);
      batch.cols[0] = castTo(date1, colType1);
      validateDateDiff(batch, date1, date2, colType1, colType2);

      TestVectorizedRowBatch.addRandomNulls(date2);
      batch.cols[1] = castTo(date2, colType2);
      validateDateDiff(batch, date1, date2, colType1, colType2);
    }
  }

  VectorExpression udf = new VectorUDFDateDiffColCol(0, 1, 2);
  VectorizedRowBatch batch = new VectorizedRowBatch(3, 1);
  BytesColumnVector bcv;
  byte[] bytes = "error".getBytes(utf8);

  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();

  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new TimestampColumnVector(1);
  batch.cols[2] = new LongColumnVector(1);
  bcv = (BytesColumnVector) batch.cols[0];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[2].isNull[0]);

  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.stringTypeInfo });
  udf.transientInit();

  batch.cols[0] = new TimestampColumnVector(1);
  batch.cols[1] = new BytesColumnVector(1);
  batch.cols[2] = new LongColumnVector(1);
  bcv = (BytesColumnVector) batch.cols[1];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[2].isNull[0]);
}
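This test exercises every (colType1, colType2) pairing and then repeats the malformed-string check twice, with the string argument in each of the two input positions. For reference, a sketch of the expectation validateDateDiff is assumed to check against (the helper itself is not shown in this listing), based on Hive's datediff semantics:

// Assumed expectation, not the actual validateDateDiff helper:
// datediff(d1, d2) is the signed number of days from d2 to d1, so with
// both inputs normalized to epoch days it is plain subtraction.
static long expectedDateDiff(long epochDays1, long epochDays2) {
  return epochDays1 - epochDays2;
}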
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.
From the class TestVectorGenericDateExpressions, method testDateAddColCol.
@Test
public void testDateAddColCol() throws HiveException {
  for (PrimitiveCategory colType1 : dateTimestampStringTypes) {
    testDateAddColCol(colType1, true);
  }

  VectorExpression udf = new VectorUDFDateAddColCol(0, 1, 2);
  VectorizedRowBatch batch = new VectorizedRowBatch(3, 1);
  BytesColumnVector bcv;
  byte[] bytes = "error".getBytes(utf8);

  udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
  udf.transientInit();

  batch.cols[0] = new BytesColumnVector(1);
  batch.cols[1] = new LongColumnVector(1);
  batch.cols[2] = new LongColumnVector(1);
  bcv = (BytesColumnVector) batch.cols[0];
  bcv.vector[0] = bytes;
  bcv.start[0] = 0;
  bcv.length[0] = bytes.length;
  udf.evaluate(batch);
  Assert.assertTrue(batch.cols[2].isNull[0]);
}
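With the hypothetical singleStringRowBatch helper sketched after testDate, the malformed-input check that closes this test (and testDateSubColCol) would shrink to a few lines; this is illustrative only, assuming that helper exists:

VectorExpression udf = new VectorUDFDateAddColCol(0, 1, 2);
udf.setInputTypeInfos(new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.timestampTypeInfo });
udf.transientInit();
// One string column plus two long columns, the last receiving the output.
VectorizedRowBatch batch = singleStringRowBatch("error", 3);
udf.evaluate(batch);
Assert.assertTrue(batch.cols[2].isNull[0]);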
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in the Apache Hive project.
From the class TestAccumuloPredicateHandler, method testCreateIteratorSettings.
@Test
public void testCreateIteratorSettings() throws Exception {
  // Override what's placed in the Configuration by setup()
  conf = new JobConf();
  List<String> columnNames = Arrays.asList("field1", "field2", "rid");
  List<TypeInfo> columnTypes = Arrays.<TypeInfo>asList(
      TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
  conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,string");
  conf.set(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, ColumnEncoding.BINARY.getName());
  String columnMappingStr = "cf:f1,cf:f2,:rowID";
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
  columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(), columnNames, columnTypes);

  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
  assertNotNull(node);

  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field2", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2);
  assertNotNull(node2);

  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);

  String filterExpr = SerializationUtilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);

  List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper);
  assertEquals(iterators.size(), 2);
  IteratorSetting is1 = iterators.get(0);
  IteratorSetting is2 = iterators.get(1);

  boolean foundQual = false;
  boolean foundPCompare = false;
  boolean foundCOpt = false;
  boolean foundConst = false;
  for (Map.Entry<String, String> option : is1.getOptions().entrySet()) {
    String optKey = option.getKey();
    if (optKey.equals(PrimitiveComparisonFilter.COLUMN)) {
      foundQual = true;
      assertEquals(option.getValue(), "cf:f1");
    } else if (optKey.equals(PrimitiveComparisonFilter.CONST_VAL)) {
      foundConst = true;
      assertEquals(option.getValue(), new String(Base64.encodeBase64("aaa".getBytes())));
    } else if (optKey.equals(PrimitiveComparisonFilter.COMPARE_OPT_CLASS)) {
      foundCOpt = true;
      assertEquals(option.getValue(), LessThanOrEqual.class.getName());
    } else if (optKey.equals(PrimitiveComparisonFilter.P_COMPARE_CLASS)) {
      foundPCompare = true;
      assertEquals(option.getValue(), StringCompare.class.getName());
    }
  }
  assertTrue(foundConst && foundCOpt && foundPCompare && foundQual);

  foundQual = false;
  foundPCompare = false;
  foundCOpt = false;
  foundConst = false;
  for (Map.Entry<String, String> option : is2.getOptions().entrySet()) {
    String optKey = option.getKey();
    if (optKey.equals(PrimitiveComparisonFilter.COLUMN)) {
      foundQual = true;
      assertEquals(option.getValue(), "cf:f2");
    } else if (optKey.equals(PrimitiveComparisonFilter.CONST_VAL)) {
      foundConst = true;
      byte[] intVal = new byte[4];
      ByteBuffer.wrap(intVal).putInt(5);
      assertEquals(option.getValue(), new String(Base64.encodeBase64(intVal)));
    } else if (optKey.equals(PrimitiveComparisonFilter.COMPARE_OPT_CLASS)) {
      foundCOpt = true;
      assertEquals(option.getValue(), GreaterThan.class.getName());
    } else if (optKey.equals(PrimitiveComparisonFilter.P_COMPARE_CLASS)) {
      foundPCompare = true;
      assertEquals(option.getValue(), IntCompare.class.getName());
    }
  }
  assertTrue(foundConst && foundCOpt && foundPCompare && foundQual);
}
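The two CONST_VAL assertions depend on how each constant is serialized before Base64 encoding: the string "aaa" contributes its raw bytes, while the int 5 is written as four big-endian bytes (ByteBuffer's default byte order). A standalone illustration of the second encoding, using the same commons-codec Base64 as the test (the demo class name is ours):

import java.nio.ByteBuffer;
import org.apache.commons.codec.binary.Base64;

public class ConstValEncodingDemo {
  public static void main(String[] args) {
    // The int constant 5 becomes the big-endian bytes {0x00, 0x00, 0x00, 0x05}.
    byte[] intVal = new byte[4];
    ByteBuffer.wrap(intVal).putInt(5);
    // Base64 of those four bytes; prints "AAAABQ==".
    System.out.println(new String(Base64.encodeBase64(intVal)));
  }
}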