Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
In class TestJsonSerDe, method getData:
public List<Pair<Properties, HCatRecord>> getData() throws UnsupportedEncodingException {
List<Pair<Properties, HCatRecord>> data = new ArrayList<Pair<Properties, HCatRecord>>();
List<Object> rlist = new ArrayList<Object>(19); // one slot per column in the 19-field schema below
rlist.add(Byte.valueOf("123"));
rlist.add(Short.valueOf("456"));
rlist.add(Integer.valueOf(789));
rlist.add(Long.valueOf(1000L));
rlist.add(Double.valueOf(5.3D));
rlist.add(Float.valueOf(2.39F));
rlist.add("hcat\nand\nhadoop");
rlist.add(null);
List<Object> innerStruct = new ArrayList<Object>(2);
innerStruct.add("abc");
innerStruct.add("def");
rlist.add(innerStruct);
List<Integer> innerList = new ArrayList<Integer>();
innerList.add(314);
innerList.add(007); // octal literal, value 7
rlist.add(innerList);
Map<Short, String> map = new HashMap<Short, String>(3);
map.put(Short.valueOf("2"), "hcat is cool");
map.put(Short.valueOf("3"), "is it?");
map.put(Short.valueOf("4"), "or is it not?");
rlist.add(map);
rlist.add(Boolean.TRUE);
List<Object> c1 = new ArrayList<Object>();
List<Object> c1_1 = new ArrayList<Object>();
c1_1.add(Integer.valueOf(12));
List<Object> i2 = new ArrayList<Object>();
List<Integer> ii1 = new ArrayList<Integer>();
ii1.add(Integer.valueOf(13));
ii1.add(Integer.valueOf(14));
i2.add(ii1);
Map<String, List<?>> ii2 = new HashMap<String, List<?>>();
List<Integer> iii1 = new ArrayList<Integer>();
iii1.add(Integer.valueOf(15));
ii2.put("phew", iii1);
i2.add(ii2);
c1_1.add(i2);
c1.add(c1_1);
rlist.add(c1);
// prec 5, scale 2
rlist.add(HiveDecimal.create(new BigDecimal("123.45")));
rlist.add(new HiveChar("hive\nchar", 10));
rlist.add(new HiveVarchar("hive\nvarchar", 20));
rlist.add(Date.valueOf("2014-01-07"));
rlist.add(Timestamp.ofEpochMilli(System.currentTimeMillis()));
rlist.add("hive\nbinary".getBytes("UTF-8"));
List<Object> nlist = new ArrayList<Object>(19); // all-null row over the same 19-field schema
// tinyint
nlist.add(null);
// smallint
nlist.add(null);
// int
nlist.add(null);
// bigint
nlist.add(null);
// double
nlist.add(null);
// float
nlist.add(null);
// string
nlist.add(null);
// string
nlist.add(null);
// struct
nlist.add(null);
// array
nlist.add(null);
// map
nlist.add(null);
// bool
nlist.add(null);
// complex
nlist.add(null);
// decimal(5,2)
nlist.add(null);
// char(10)
nlist.add(null);
// varchar(20)
nlist.add(null);
// date
nlist.add(null);
// timestamp
nlist.add(null);
// binary
nlist.add(null);
String typeString = "tinyint,smallint,int,bigint,double,float,string,string,"
    + "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
    + "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>,"
    + "decimal(5,2),char(10),varchar(20),date,timestamp,binary";
Properties props = new Properties();
props.put(serdeConstants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1,bd,hc,hvc,dt,ts,bin");
props.put(serdeConstants.LIST_COLUMN_TYPES, typeString);
// props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N");
// props.put(Constants.SERIALIZATION_FORMAT, "1");
data.add(new Pair<Properties, HCatRecord>(props, new DefaultHCatRecord(rlist)));
data.add(new Pair<Properties, HCatRecord>(props, new DefaultHCatRecord(nlist)));
return data;
}
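A minimal round-trip sketch of how a test might consume these pairs, assuming the HCatalog JsonSerDe and the standard SerDe contract (initialize, serialize, deserialize); the loop and variable names are illustrative, not the actual test body:
// Hypothetical round trip over the pairs returned by getData(); assumes
// org.apache.hive.hcatalog.data.JsonSerDe and the classic two-argument
// SerDe initialize(Configuration, Properties) signature.
for (Pair<Properties, HCatRecord> p : getData()) {
  JsonSerDe serde = new JsonSerDe();
  serde.initialize(new Configuration(), p.first);
  Writable json = serde.serialize(p.second, serde.getObjectInspector());
  HCatRecord back = (HCatRecord) serde.deserialize(json);
  // a real test would now assert field-by-field equality of back and p.second
}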
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
In class WritableHiveCharObjectInspector, method getPrimitiveWithParams:
private HiveChar getPrimitiveWithParams(HiveCharWritable val) {
HiveChar hv = new HiveChar();
hv.setValue(val.getHiveChar(), getMaxLength());
return hv;
}
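The private helper above is reached through the inspector's public getPrimitiveJavaObject path when the stored value does not match the type parameters. A hedged sketch of the effect, assuming the CharTypeInfo-based constructor; the values are illustrative:
// Illustrative only: a char(5) inspector re-applies its max length on read,
// so a value stored under a longer limit comes back truncated.
WritableHiveCharObjectInspector oi =
    new WritableHiveCharObjectInspector(new CharTypeInfo(5));
HiveCharWritable w = new HiveCharWritable();
w.set("abcdefgh", 10);
HiveChar out = oi.getPrimitiveJavaObject(w); // holds at most 5 characters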
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
In class PrimitiveObjectInspectorUtils, method getHiveChar:
public static HiveChar getHiveChar(Object o, PrimitiveObjectInspector oi) {
if (o == null) {
return null;
}
HiveChar result = null;
switch (oi.getPrimitiveCategory()) {
case CHAR:
result = ((HiveCharObjectInspector) oi).getPrimitiveJavaObject(o);
break;
default:
// No char length available, copy whole string value here.
result = new HiveChar();
result.setValue(getString(o, oi));
break;
}
return result;
}
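A small usage sketch for the default branch, using the stock string inspector from PrimitiveObjectInspectorFactory; the input value is illustrative:
// Converting a plain String: the string type carries no char length,
// so the whole value is copied into the resulting HiveChar.
HiveChar hc = PrimitiveObjectInspectorUtils.getHiveChar(
    "hello", PrimitiveObjectInspectorFactory.javaStringObjectInspector);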
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
In class MyTestPrimitiveClass, method nonRandomFill:
public void nonRandomFill(int idx, ExtraTypeInfo extraTypeInfo) {
myByte = (Byte) MyTestClass.getNonRandValue(MyTestClass.nrByte, idx);
myShort = (Short) MyTestClass.getNonRandValue(MyTestClass.nrShort, idx);
myInt = (Integer) MyTestClass.getNonRandValue(MyTestClass.nrInt, idx);
myLong = (Long) MyTestClass.getNonRandValue(MyTestClass.nrLong, idx);
myFloat = (Float) MyTestClass.getNonRandValue(MyTestClass.nrFloat, idx);
myDouble = (Double) MyTestClass.getNonRandValue(MyTestClass.nrDouble, idx);
myString = (String) MyTestClass.getNonRandValue(MyTestClass.nrString, idx);
myHiveChar = new HiveChar(myString, myString.length());
extraTypeInfo.hiveCharMaxLength = myString.length();
myHiveVarchar = new HiveVarchar(myString, myString.length());
extraTypeInfo.hiveVarcharMaxLength = myString.length();
myDecimal = (HiveDecimal) MyTestClass.getNonRandValue(MyTestClass.nrDecimal, idx);
extraTypeInfo.precision = myDecimal.precision();
extraTypeInfo.scale = myDecimal.scale();
myDate = (Date) MyTestClass.getNonRandValue(MyTestClass.nrDate, idx);
myIntervalYearMonth = (HiveIntervalYearMonth) MyTestClass.getNonRandValue(MyTestClass.nrIntervalYearMonth, idx);
myIntervalDayTime = (HiveIntervalDayTime) MyTestClass.getNonRandValue(MyTestClass.nrIntervalDayTime, idx);
}
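The ExtraTypeInfo fields filled above correspond to parameterized Hive type infos; a hedged illustration using the serde2 typeinfo classes (the mapping itself is an assumption of this sketch, not part of the test):
// CharTypeInfo, VarcharTypeInfo and DecimalTypeInfo are the serde2 typeinfo
// classes that carry the same parameters captured in extraTypeInfo.
CharTypeInfo charType = new CharTypeInfo(extraTypeInfo.hiveCharMaxLength);
VarcharTypeInfo varcharType = new VarcharTypeInfo(extraTypeInfo.hiveVarcharMaxLength);
DecimalTypeInfo decimalType = new DecimalTypeInfo(extraTypeInfo.precision, extraTypeInfo.scale);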
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
In class TestHiveCharWritable, method testStrippedValue:
@Test
public void testStrippedValue() {
// stripped end
HiveCharWritable hcw = new HiveCharWritable();
HiveChar hc = new HiveChar("abcd", 8);
hcw.set(hc);
assertEquals(4, hcw.getCharacterLength());
assertEquals("abcd ", hcw.toString());
assertEquals(new Text("abcd "), hcw.getTextValue());
assertEquals(new Text("abcd"), hcw.getStrippedValue());
assertEquals("abcd", hcw.getStrippedValue().toString());
// stripped end, untouched start
hcw = new HiveCharWritable();
hc = new HiveChar(" abcd ", 8);
hcw.set(hc);
// 2 trailing whitespace characters are stripped; the leading space is kept
assertEquals(6, hcw.getCharacterLength());
assertEquals(" abcd ", hcw.toString());
assertEquals(new Text(" abcd "), hcw.getTextValue());
assertEquals(new Text(" abcd"), hcw.getStrippedValue());
assertEquals(" abcd", hcw.getStrippedValue().toString());
// empty
hcw = new HiveCharWritable();
hc = new HiveChar(" ", 8);
hcw.set(hc);
assertEquals(0, hcw.getCharacterLength());
assertEquals(" ", hcw.toString());
assertEquals(new Text(" "), hcw.getTextValue());
assertEquals(new Text(""), hcw.getStrippedValue());
assertEquals("", hcw.getStrippedValue().toString());
// truncated
hcw = new HiveCharWritable();
// will be truncated to 8 chars
hc = new HiveChar("abcdefghij", 8);
hcw.set(hc);
// truncated
assertEquals(8, hcw.getCharacterLength());
assertEquals("abcdefgh", hcw.toString());
assertEquals(new Text("abcdefgh"), hcw.getTextValue());
assertEquals(new Text("abcdefgh"), hcw.getStrippedValue());
assertEquals("abcdefgh", hcw.getStrippedValue().toString());
}
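For contrast, the same padding and stripping rules are visible on HiveChar directly; a minimal sketch whose expected values follow from the assertions above:
// HiveChar stores the value padded to the declared length; stripping removes
// trailing spaces only, and getCharacterLength() counts the stripped form.
HiveChar c = new HiveChar("ab", 5);
c.getPaddedValue(); // "ab   " (padded to length 5)
c.getStrippedValue(); // "ab"
c.getCharacterLength(); // 2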