Example 21 with TypeInfoFactory.intTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.

The class TestFunctionRegistry, method testImplicitConversion.

public void testImplicitConversion() {
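    // Note: varchar5, varchar10, char5, and char10 are fields of the test
    // class (presumably initialized in setUp() via
    // TypeInfoFactory.getPrimitiveTypeInfo); the local varchar10 declared
    // further down shadows its field from that point on.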
    implicit(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
    implicit(TypeInfoFactory.longTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
    implicit(TypeInfoFactory.floatTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
    implicit(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
    implicit(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
    implicit(TypeInfoFactory.dateTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
    implicit(TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
    implicit(varchar10, TypeInfoFactory.stringTypeInfo, true);
    implicit(TypeInfoFactory.stringTypeInfo, varchar10, true);
    // Try with parameterized varchar types
    TypeInfo varchar10 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(10)");
    TypeInfo varchar20 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(20)");
    implicit(varchar10, TypeInfoFactory.stringTypeInfo, true);
    implicit(varchar20, TypeInfoFactory.stringTypeInfo, true);
    implicit(TypeInfoFactory.stringTypeInfo, varchar10, true);
    implicit(TypeInfoFactory.stringTypeInfo, varchar20, true);
    implicit(varchar20, varchar10, true);
    implicit(char10, TypeInfoFactory.stringTypeInfo, true);
    implicit(TypeInfoFactory.stringTypeInfo, char10, true);
    implicit(char5, char10, true);
    implicit(char5, varchar10, true);
    implicit(varchar5, char10, true);
    implicit(TypeInfoFactory.intTypeInfo, char10, true);
    implicit(TypeInfoFactory.intTypeInfo, varchar10, true);
    implicit(TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo, true);
}
Also used: PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
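The implicit helper is not shown on this page. A minimal sketch of its likely shape, assuming it simply asserts the result of FunctionRegistry.implicitConvertible (that delegate is an assumption; the snippet only shows the call sites):

private void implicit(TypeInfo a, TypeInfo b, boolean convertible) {
    // Expect Hive to allow (or reject) an implicit conversion from a to b.
    assertEquals(convertible, FunctionRegistry.implicitConvertible(a, b));
}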

Example 22 with TypeInfoFactory.intTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.

The class TestLazyAccumuloMap, method testBinaryIntMap.

@Test
public void testBinaryIntMap() throws SerDeException, IOException {
    AccumuloHiveRow row = new AccumuloHiveRow("row");
    row.add(new Text("cf1"), new Text(toBytes(1)), toBytes(2));
    row.add(new Text("cf1"), new Text(toBytes(2)), toBytes(4));
    row.add(new Text("cf1"), new Text(toBytes(3)), toBytes(6));
    HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping("cf1", null, ColumnEncoding.BINARY, ColumnEncoding.BINARY, "column", TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo).toString());
    // Map of Integer to Integer, binary-encoded
    Text nullSequence = new Text("\\N");
    ObjectInspector oi = LazyFactory.createLazyObjectInspector(TypeInfoUtils.getTypeInfosFromTypeString("map<int,int>").get(0), new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    LazyAccumuloMap map = new LazyAccumuloMap((LazyMapObjectInspector) oi);
    map.init(row, mapping);
    Assert.assertEquals(3, map.getMapSize());
    Object o = map.getMapValueElement(new IntWritable(1));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(2), ((LazyInteger) o).getWritableObject());
    o = map.getMapValueElement(new IntWritable(2));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(4), ((LazyInteger) o).getWritableObject());
    o = map.getMapValueElement(new IntWritable(3));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(6), ((LazyInteger) o).getWritableObject());
}
Also used: LazyMapObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) HiveAccumuloMapColumnMapping(org.apache.hadoop.hive.accumulo.columns.HiveAccumuloMapColumnMapping) Text(org.apache.hadoop.io.Text) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
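The toBytes helper is not shown either; a plausible reconstruction, assuming it emits the 4-byte big-endian form that ColumnEncoding.BINARY expects (the same bytes DataOutputStream.writeInt produces):

private byte[] toBytes(int i) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    // Serialize the int as 4 big-endian bytes.
    out.writeInt(i);
    out.close();
    return baos.toByteArray();
}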

Example 23 with TypeInfoFactory.intTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.

The class TestLazyAccumuloMap, method testIntMap.

@Test
public void testIntMap() throws SerDeException, IOException {
    AccumuloHiveRow row = new AccumuloHiveRow("row");
    row.add(new Text("cf1"), new Text("1"), "2".getBytes());
    row.add(new Text("cf1"), new Text("2"), "4".getBytes());
    row.add(new Text("cf1"), new Text("3"), "6".getBytes());
    HiveAccumuloMapColumnMapping mapping = new HiveAccumuloMapColumnMapping("cf1", null, ColumnEncoding.STRING, ColumnEncoding.STRING, "column", TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo).toString());
    // Map of Integer to Integer
    Text nullSequence = new Text("\\N");
    ObjectInspector oi = LazyFactory.createLazyObjectInspector(TypeInfoUtils.getTypeInfosFromTypeString("map<int,int>").get(0), new byte[] { (byte) 1, (byte) 2 }, 0, nullSequence, false, (byte) 0);
    LazyAccumuloMap map = new LazyAccumuloMap((LazyMapObjectInspector) oi);
    map.init(row, mapping);
    Assert.assertEquals(3, map.getMapSize());
    Object o = map.getMapValueElement(new IntWritable(1));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(2), ((LazyInteger) o).getWritableObject());
    o = map.getMapValueElement(new IntWritable(2));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(4), ((LazyInteger) o).getWritableObject());
    o = map.getMapValueElement(new IntWritable(3));
    Assert.assertNotNull(o);
    Assert.assertEquals(new IntWritable(6), ((LazyInteger) o).getWritableObject());
}
Also used: LazyMapObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) HiveAccumuloMapColumnMapping(org.apache.hadoop.hive.accumulo.columns.HiveAccumuloMapColumnMapping) Text(org.apache.hadoop.io.Text) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
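The only material difference from testBinaryIntMap above is the encoding: the keys and values here are UTF-8 digit strings, so the mapping uses ColumnEncoding.STRING and the cell bytes come from String.getBytes() instead of a binary serializer. For the value 2, the two layouts look like this (illustrative only, not part of the test):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// STRING encoding: the character '2' as UTF-8, a single byte 0x32.
byte[] asString = "2".getBytes(StandardCharsets.UTF_8);
// BINARY encoding: the int 2 as four big-endian bytes, 0x00 0x00 0x00 0x02.
byte[] asBinary = ByteBuffer.allocate(4).putInt(2).array();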

Example 24 with TypeInfoFactory.intTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.

The class TestLazyAccumuloRow, method testDeserializationOfBinaryEncoding.

@Test
public void testDeserializationOfBinaryEncoding() throws Exception {
    List<String> columns = Arrays.asList("row", "given_name", "surname", "age", "weight", "height");
    List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo);
    LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory.createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators, new Text("\\N"), false, false, (byte) '\\');
    DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
    Properties props = new Properties();
    props.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid#s,personal:given_name#s,personal:surname#s,personal:age,personal:weight,personal:height");
    props.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
    props.setProperty(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, ColumnEncoding.BINARY.getName());
    AccumuloSerDeParameters params = new AccumuloSerDeParameters(new Configuration(), props, AccumuloSerDe.class.getName());
    rowIdFactory.init(params, props);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    LazyAccumuloRow lazyRow = new LazyAccumuloRow(objectInspector);
    AccumuloHiveRow hiveRow = new AccumuloHiveRow("1");
    hiveRow.add("personal", "given_name", "Bob".getBytes());
    hiveRow.add("personal", "surname", "Stevens".getBytes());
    out.writeInt(30);
    hiveRow.add("personal", "age", baos.toByteArray());
    baos.reset();
    out.writeInt(200);
    hiveRow.add("personal", "weight", baos.toByteArray());
    baos.reset();
    out.writeInt(72);
    hiveRow.add("personal", "height", baos.toByteArray());
    ColumnMapper columnMapper = params.getColumnMapper();
    lazyRow.init(hiveRow, columnMapper.getColumnMappings(), rowIdFactory);
    Object o = lazyRow.getField(0);
    Assert.assertNotNull(o);
    Assert.assertEquals(LazyString.class, o.getClass());
    Assert.assertEquals("1", ((LazyString) o).toString());
    o = lazyRow.getField(1);
    Assert.assertNotNull(o);
    Assert.assertEquals(LazyString.class, o.getClass());
    Assert.assertEquals("Bob", ((LazyString) o).toString());
    o = lazyRow.getField(2);
    Assert.assertNotNull(o);
    Assert.assertEquals(LazyString.class, o.getClass());
    Assert.assertEquals("Stevens", ((LazyString) o).toString());
    o = lazyRow.getField(3);
    Assert.assertNotNull(o);
    Assert.assertEquals(LazyDioInteger.class, o.getClass());
    Assert.assertEquals("30", ((LazyDioInteger) o).toString());
    o = lazyRow.getField(4);
    Assert.assertNotNull(o);
    Assert.assertEquals(LazyDioInteger.class, o.getClass());
    Assert.assertEquals("200", ((LazyDioInteger) o).toString());
    o = lazyRow.getField(5);
    Assert.assertNotNull(o);
    Assert.assertEquals(LazyDioInteger.class, o.getClass());
    Assert.assertEquals("72", ((LazyDioInteger) o).toString());
}
Also used: LazySimpleStructObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector) Configuration(org.apache.hadoop.conf.Configuration) DataOutputStream(java.io.DataOutputStream) Text(org.apache.hadoop.io.Text) LazyString(org.apache.hadoop.hive.serde2.lazy.LazyString) DefaultAccumuloRowIdFactory(org.apache.hadoop.hive.accumulo.serde.DefaultAccumuloRowIdFactory) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Properties(java.util.Properties) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) AccumuloSerDe(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDe) AccumuloSerDeParameters(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters) ColumnMapper(org.apache.hadoop.hive.accumulo.columns.ColumnMapper) Test(org.junit.Test)
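A detail worth noting: the writeInt / toByteArray() / reset() sequence needs no flush, because DataOutputStream.writeInt writes straight through to the wrapped ByteArrayOutputStream. A quick illustrative round trip (not part of the Hive test; roughly what LazyDioInteger does when decoding a BINARY-encoded column):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

static int roundTrip(int value) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    new DataOutputStream(baos).writeInt(value);
    // Read the same 4 big-endian bytes back.
    return new DataInputStream(new ByteArrayInputStream(baos.toByteArray())).readInt();
}
// roundTrip(30) == 30, matching the "30" asserted for the age field.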

Example 25 with TypeInfoFactory.intTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.intTypeInfo in project hive by apache.

The class TestLazyAccumuloRow, method testExpectedDeserializationOfColumns.

@Test
public void testExpectedDeserializationOfColumns() throws Exception {
    List<String> columns = Arrays.asList("row", "given_name", "surname", "age", "weight", "height");
    List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo);
    LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory.createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators, new Text("\\N"), false, false, (byte) '\\');
    DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
    Properties props = new Properties();
    props.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowid,personal:given_name,personal:surname,personal:age,personal:weight,personal:height");
    props.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columns));
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
    AccumuloSerDeParameters params = new AccumuloSerDeParameters(new Configuration(), props, AccumuloSerDe.class.getName());
    rowIdFactory.init(params, props);
    LazyAccumuloRow lazyRow = new LazyAccumuloRow(objectInspector);
    AccumuloHiveRow hiveRow = new AccumuloHiveRow("1");
    hiveRow.add("personal", "given_name", "Bob".getBytes());
    hiveRow.add("personal", "surname", "Stevens".getBytes());
    hiveRow.add("personal", "age", "30".getBytes());
    hiveRow.add("personal", "weight", "200".getBytes());
    hiveRow.add("personal", "height", "72".getBytes());
    ColumnMapper columnMapper = params.getColumnMapper();
    lazyRow.init(hiveRow, columnMapper.getColumnMappings(), rowIdFactory);
    Object o = lazyRow.getField(0);
    Assert.assertEquals(LazyString.class, o.getClass());
    Assert.assertEquals("1", ((LazyString) o).toString());
    o = lazyRow.getField(1);
    Assert.assertEquals(LazyString.class, o.getClass());
    Assert.assertEquals("Bob", ((LazyString) o).toString());
    o = lazyRow.getField(2);
    Assert.assertEquals(LazyString.class, o.getClass());
    Assert.assertEquals("Stevens", ((LazyString) o).toString());
    o = lazyRow.getField(3);
    Assert.assertEquals(LazyInteger.class, o.getClass());
    Assert.assertEquals("30", ((LazyInteger) o).toString());
    o = lazyRow.getField(4);
    Assert.assertEquals(LazyInteger.class, o.getClass());
    Assert.assertEquals("200", ((LazyInteger) o).toString());
    o = lazyRow.getField(5);
    Assert.assertEquals(LazyInteger.class, o.getClass());
    Assert.assertEquals("72", ((LazyInteger) o).toString());
}
Also used: LazySimpleStructObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector) Configuration(org.apache.hadoop.conf.Configuration) Text(org.apache.hadoop.io.Text) LazyString(org.apache.hadoop.hive.serde2.lazy.LazyString) DefaultAccumuloRowIdFactory(org.apache.hadoop.hive.accumulo.serde.DefaultAccumuloRowIdFactory) Properties(java.util.Properties) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) AccumuloSerDe(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDe) AccumuloSerDeParameters(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters) ColumnMapper(org.apache.hadoop.hive.accumulo.columns.ColumnMapper) Test(org.junit.Test)
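Contrast this with testDeserializationOfBinaryEncoding above: here COLUMN_MAPPINGS carries no #s suffixes and DEFAULT_STORAGE_TYPE is never set, so every column uses the default string encoding. The int fields are therefore parsed from their text form ("30", "200", "72") and surface as LazyInteger, whereas the binary test yields LazyDioInteger.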

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 31
IntWritable (org.apache.hadoop.io.IntWritable): 30
Test (org.junit.Test): 26
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 16
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 10
Text (org.apache.hadoop.io.Text): 10
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 7
ArrayList (java.util.ArrayList): 6
ColumnMapper (org.apache.hadoop.hive.accumulo.columns.ColumnMapper): 6
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 6
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 6
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 5
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 5
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 4
Properties (java.util.Properties): 4
Configuration (org.apache.hadoop.conf.Configuration): 4
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 4
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 4
DataOutputStream (java.io.DataOutputStream): 3
HiveAccumuloMapColumnMapping (org.apache.hadoop.hive.accumulo.columns.HiveAccumuloMapColumnMapping): 3