Use of org.apache.pig.ResourceSchema.ResourceFieldSchema in the Apache Phoenix project.
The testTransformToTuple method of the TypeUtilTest class.
@Test
public void testTransformToTuple() throws Exception {
    // Verifies that TypeUtil.transformToTuple converts a PhoenixRecordWritable's
    // column values into a Pig Tuple whose elements match the projected schema types.
    PhoenixRecordWritable record = mock(PhoenixRecordWritable.class);
    Double[] doubleArr = new Double[2];
    doubleArr[0] = 64.87;
    doubleArr[1] = 89.96;
    PhoenixArray arr = PArrayDataType.instantiatePhoenixArray(PDouble.INSTANCE, doubleArr);
    Map<String, Object> values = Maps.newLinkedHashMap();
    values.put("first", "213123");
    values.put("second", 1231123);
    values.put("third", 31231231232131L);
    values.put("four", "bytearray".getBytes());
    values.put("five", arr);
    when(record.getResultMap()).thenReturn(values);
    ResourceFieldSchema field = new ResourceFieldSchema().setType(DataType.CHARARRAY);
    ResourceFieldSchema field1 = new ResourceFieldSchema().setType(DataType.INTEGER);
    ResourceFieldSchema field2 = new ResourceFieldSchema().setType(DataType.LONG);
    ResourceFieldSchema field3 = new ResourceFieldSchema().setType(DataType.BYTEARRAY);
    ResourceFieldSchema field4 = new ResourceFieldSchema().setType(DataType.TUPLE);
    ResourceFieldSchema[] projectedColumns = { field, field1, field2, field3, field4 };
    Tuple t = TypeUtil.transformToTuple(record, projectedColumns);
    assertEquals(DataType.LONG, DataType.findType(t.get(2)));
    assertEquals(DataType.TUPLE, DataType.findType(t.get(4)));
    // The Phoenix array column is flattened into a nested tuple of its elements.
    Tuple doubleArrayTuple = (Tuple) t.get(4);
    assertEquals(2, doubleArrayTuple.size());
    // Second pass: BIGDECIMAL and BIGINTEGER conversion on the same mocked record.
    field = new ResourceFieldSchema().setType(DataType.BIGDECIMAL);
    field1 = new ResourceFieldSchema().setType(DataType.BIGINTEGER);
    values.clear();
    // Use the String constructor: new BigDecimal(double) embeds binary floating-point
    // representation error (Effective Java / BigDecimal javadoc). The assertion below
    // only checks the Pig DataType, so the exact value change is behavior-safe.
    values.put("first", new BigDecimal("123123123.123213"));
    values.put("second", new BigInteger("1312313231312"));
    ResourceFieldSchema[] columns = { field, field1 };
    t = TypeUtil.transformToTuple(record, columns);
    assertEquals(DataType.BIGDECIMAL, DataType.findType(t.get(0)));
    assertEquals(DataType.BIGINTEGER, DataType.findType(t.get(1)));
}
Use of org.apache.pig.ResourceSchema.ResourceFieldSchema in the Apache Phoenix project.
The testSchema method of the PhoenixPigSchemaUtilTest class.
@Test
public void testSchema() throws SQLException, IOException {
    // Mock a configuration that selects the TABLE schema type.
    final Configuration configuration = mock(Configuration.class);
    when(configuration.get(PhoenixConfigurationUtil.SCHEMA_TYPE)).thenReturn(SchemaType.TABLE.name());
    // Resolve the schema through a stubbed dependency returning two known columns.
    final ResourceSchema actual = PhoenixPigSchemaUtil.getResourceSchema(configuration, new Dependencies() {
        List<ColumnInfo> getSelectColumnMetadataList(Configuration configuration) throws SQLException {
            return Lists.newArrayList(ID_COLUMN, NAME_COLUMN);
        }
    });
    // Expected schema: ID maps to LONG, NAME maps to CHARARRAY.
    final ResourceFieldSchema idField = new ResourceFieldSchema().setName("ID").setType(DataType.LONG);
    final ResourceFieldSchema nameField = new ResourceFieldSchema().setName("NAME").setType(DataType.CHARARRAY);
    final ResourceSchema expected = new ResourceSchema().setFields(new ResourceFieldSchema[] { idField, nameField });
    assertEquals(expected.toString(), actual.toString());
}
Use of org.apache.pig.ResourceSchema.ResourceFieldSchema in the Apache Phoenix project.
The putNext method of the PhoenixHBaseStorage class.
@Override
public void putNext(Tuple t) throws IOException {
    // Converts each field of the Pig tuple to its Phoenix representation
    // (per the declared schema, or by runtime type discovery when no schema
    // was given) and writes the resulting record through the record writer.
    ResourceFieldSchema[] fieldSchemas = (schema == null) ? null : schema.getFields();
    PhoenixRecordWritable record = new PhoenixRecordWritable(this.columnInfo);
    try {
        for (int i = 0; i < t.size(); i++) {
            Object value = t.get(i);
            // Nulls pass through untyped; no conversion needed.
            if (value == null) {
                record.add(null);
                continue;
            }
            ColumnInfo cinfo = this.columnInfo.get(i);
            // Prefer the declared schema type; fall back to inspecting the value.
            byte type = (fieldSchemas == null) ? DataType.findType(value) : fieldSchemas[i].getType();
            PDataType pDataType = PDataType.fromTypeId(cinfo.getSqlType());
            Object v = TypeUtil.castPigTypeToPhoenix(value, type, pDataType);
            record.add(v);
        }
        this.writer.write(null, record);
    } catch (InterruptedException e) {
        // Restore the interrupt status before propagating.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    } catch (SQLException e) {
        // Pass the exception as the last argument so SLF4J logs the stack
        // trace; previously only the tuple was logged and the cause was lost.
        LOG.error("Error on tuple {} .", t, e);
        throw new IOException(e);
    }
}
Use of org.apache.pig.ResourceSchema.ResourceFieldSchema in the elephant-bird project by Twitter.
The getLoadSchema method of the NullWritableConverter class.
@Override
public ResourceFieldSchema getLoadSchema() throws IOException {
    // NullWritable carries no data, so advertise the NULL Pig type.
    return new ResourceFieldSchema().setType(DataType.NULL);
}
Use of org.apache.pig.ResourceSchema.ResourceFieldSchema in the elephant-bird project by Twitter.
The schemaToProtoDescriptor method of the PigToProtobuf class.
/**
 * For a given <code>ResourceSchema</code>, generate a protobufs <code>Descriptor</code>
 * with analogous field names and types.
 *
 * @param schema Pig schema; must contain at least one field.
 * @param extraFields optionally pass a List of extra fields (Pairs of name:type) to be included.
 * @return Protobufs Descriptor
 * @throws Descriptors.DescriptorValidationException
 */
public static Descriptor schemaToProtoDescriptor(ResourceSchema schema, List<Pair<String, Type>> extraFields) throws DescriptorValidationException {
    DescriptorProto.Builder builder = DescriptorProto.newBuilder();
    // Map every Pig field to an analogous protobuf field; positions are 1-based.
    int position = 0;
    for (ResourceFieldSchema fieldSchema : schema.getFields()) {
        position++;
        String fieldName = fieldSchema.getName();
        byte dataTypeId = fieldSchema.getType();
        Type protoType = pigTypeToProtoType(dataTypeId);
        LOG.info("Mapping Pig field " + fieldName + " of type " + dataTypeId + " to protobuf type: " + protoType);
        addField(builder, fieldName, position, protoType);
    }
    // An empty Pig schema cannot produce a usable descriptor.
    if (position == 0) {
        throw new IllegalArgumentException("ResourceSchema does not have any fields");
    }
    // Append any caller-supplied extra fields after the schema-derived ones.
    if (extraFields != null) {
        for (Pair<String, Type> extraField : extraFields) {
            addField(builder, extraField.first, ++position, extraField.second);
        }
    }
    builder.setName("PigToProtobufDynamicBuilder");
    return Protobufs.makeMessageDescriptor(builder.build());
}
Aggregations