Use of org.apache.phoenix.mapreduce.PhoenixRecordWritable in project phoenix by apache.
From the class TypeUtilTest, the method testTransformToTuple:
@Test
public void testTransformToTuple() throws Exception {
    PhoenixRecordWritable record = mock(PhoenixRecordWritable.class);
    Double[] doubleArr = new Double[2];
    doubleArr[0] = 64.87;
    doubleArr[1] = 89.96;
    PhoenixArray arr = PArrayDataType.instantiatePhoenixArray(PDouble.INSTANCE, doubleArr);
    Map<String, Object> values = Maps.newLinkedHashMap();
    values.put("first", "213123");
    values.put("second", 1231123);
    values.put("third", 31231231232131L);
    values.put("four", "bytearray".getBytes());
    values.put("five", arr);
    when(record.getResultMap()).thenReturn(values);
    ResourceFieldSchema field = new ResourceFieldSchema().setType(DataType.CHARARRAY);
    ResourceFieldSchema field1 = new ResourceFieldSchema().setType(DataType.INTEGER);
    ResourceFieldSchema field2 = new ResourceFieldSchema().setType(DataType.LONG);
    ResourceFieldSchema field3 = new ResourceFieldSchema().setType(DataType.BYTEARRAY);
    ResourceFieldSchema field4 = new ResourceFieldSchema().setType(DataType.TUPLE);
    ResourceFieldSchema[] projectedColumns = { field, field1, field2, field3, field4 };
    Tuple t = TypeUtil.transformToTuple(record, projectedColumns);
    assertEquals(DataType.LONG, DataType.findType(t.get(2)));
    assertEquals(DataType.TUPLE, DataType.findType(t.get(4)));
    Tuple doubleArrayTuple = (Tuple) t.get(4);
    assertEquals(2, doubleArrayTuple.size());
    // The mocked record keeps returning the same map instance, so clearing and
    // repopulating it changes what the next transformToTuple call sees.
    field = new ResourceFieldSchema().setType(DataType.BIGDECIMAL);
    field1 = new ResourceFieldSchema().setType(DataType.BIGINTEGER);
    values.clear();
    values.put("first", new BigDecimal(123123123.123213));
    values.put("second", new BigInteger("1312313231312"));
    ResourceFieldSchema[] columns = { field, field1 };
    t = TypeUtil.transformToTuple(record, columns);
    assertEquals(DataType.BIGDECIMAL, DataType.findType(t.get(0)));
    assertEquals(DataType.BIGINTEGER, DataType.findType(t.get(1)));
}
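The nested-tuple assertions are the interesting part of this test. As a minimal sketch (not Phoenix's actual TypeUtil implementation; the class and method names here are hypothetical), the conversion the test exercises looks roughly like this: each entry of the record's result map becomes one tuple field, and a java.sql.Array value is unpacked into a nested Tuple.

import java.sql.Array;
import java.util.Map;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

public class TypeUtilSketch {
    // Sketch of the result-map-to-tuple mapping; a JDBC array becomes a nested
    // Tuple, which is why the test sees DataType.TUPLE at index 4.
    public static Tuple toTuple(Map<String, Object> resultMap) throws Exception {
        TupleFactory tf = TupleFactory.getInstance();
        Tuple tuple = tf.newTuple(resultMap.size());
        int i = 0;
        for (Object value : resultMap.values()) {
            if (value instanceof Array) {
                // getArray() may return a primitive array (e.g. double[]), so
                // read elements reflectively instead of casting to Object[].
                Object raw = ((Array) value).getArray();
                int n = java.lang.reflect.Array.getLength(raw);
                Tuple nested = tf.newTuple(n);
                for (int j = 0; j < n; j++) {
                    nested.set(j, java.lang.reflect.Array.get(raw, j));
                }
                value = nested;
            }
            tuple.set(i++, value);
        }
        return tuple;
    }
}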
Use of org.apache.phoenix.mapreduce.PhoenixRecordWritable in project phoenix by apache.
From the class PhoenixHBaseLoader, the method getNext:
@Override
public Tuple getNext() throws IOException {
    try {
        if (!reader.nextKeyValue()) {
            return null;
        }
        final PhoenixRecordWritable record = reader.getCurrentValue();
        if (record == null) {
            return null;
        }
        return TypeUtil.transformToTuple(record, schema.getFields());
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        int errCode = 6018;
        final String errMsg = "Error while reading input";
        throw new ExecException(errMsg, errCode, PigException.REMOTE_ENVIRONMENT, e);
    }
}
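For context, here is a minimal sketch of driving this loader from Java through PigServer; the table name MY_TABLE and the ZooKeeper quorum 'localhost' are placeholders. Each iteration pulls one row of the Phoenix table, produced by the getNext() method above.

import java.util.Iterator;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.data.Tuple;

public class LoaderUsageSketch {
    public static void main(String[] args) throws Exception {
        PigServer pig = new PigServer(ExecType.LOCAL);
        // The LOAD statement is what ultimately calls getNext() per row.
        pig.registerQuery("rows = LOAD 'hbase://table/MY_TABLE' USING "
                + "org.apache.phoenix.pig.PhoenixHBaseLoader('localhost');");
        Iterator<Tuple> it = pig.openIterator("rows");
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}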
Use of org.apache.phoenix.mapreduce.PhoenixRecordWritable in project phoenix by apache.
From the class PhoenixHBaseStorage, the method putNext:
@Override
public void putNext(Tuple t) throws IOException {
    ResourceFieldSchema[] fieldSchemas = (schema == null) ? null : schema.getFields();
    PhoenixRecordWritable record = new PhoenixRecordWritable(this.columnInfo);
    try {
        for (int i = 0; i < t.size(); i++) {
            Object value = t.get(i);
            if (value == null) {
                record.add(null);
                continue;
            }
            ColumnInfo cinfo = this.columnInfo.get(i);
            // Take the Pig type from the schema when one was given, otherwise
            // infer it from the value itself.
            byte type = (fieldSchemas == null) ? DataType.findType(value) : fieldSchemas[i].getType();
            PDataType pDataType = PDataType.fromTypeId(cinfo.getSqlType());
            Object v = TypeUtil.castPigTypeToPhoenix(value, type, pDataType);
            record.add(v);
        }
        this.writer.write(null, record);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    } catch (SQLException e) {
        LOG.error("Error on tuple {}.", t);
        throw new IOException(e);
    }
}
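The write path is symmetric to the loader. Here is a minimal sketch of a store job that ends in the putNext() method above; the input file, table name, quorum, and batch size are placeholders.

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;

public class StorageUsageSketch {
    public static void main(String[] args) throws Exception {
        PigServer pig = new PigServer(ExecType.LOCAL);
        pig.registerQuery("rows = LOAD 'input.csv' USING PigStorage(',') "
                + "AS (id:int, name:chararray);");
        // Each tuple of 'rows' is handed to putNext(), cast column by column to
        // the Phoenix types of MY_TABLE, and written via PhoenixRecordWritable.
        pig.registerQuery("STORE rows INTO 'hbase://MY_TABLE' USING "
                + "org.apache.phoenix.pig.PhoenixHBaseStorage('localhost', '-batchSize 1000');");
    }
}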