Use of org.apache.phoenix.schema.types.PhoenixArray in project phoenix by apache.
From the class StringToArrayFunctionIT, method testStringToArrayFunctionWithNestedFunctions3.
@Test
public void testStringToArrayFunctionWithNestedFunctions3() throws Exception {
    Connection conn = DriverManager.getConnection(getUrl());
    ResultSet rs = conn.createStatement().executeQuery(
        "SELECT STRING_TO_ARRAY(ARRAY_TO_STRING(ARRAY['a', 'b', 'c'], delimiter2), ARRAY_ELEM(ARRAY[',', '.'], 2), 'b') FROM "
            + tableName + " WHERE region_name = 'SF Bay Area'");
    assertTrue(rs.next());
    PhoenixArray expected = new PhoenixArray(PVarchar.INSTANCE, new Object[] { "a", null, "c" });
    assertEquals(expected, rs.getArray(1));
    assertFalse(rs.next());
}
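Reading the expected result backwards clarifies the nested calls: Phoenix arrays are 1-based, so ARRAY_ELEM(ARRAY[',', '.'], 2) selects '.'. For the split to produce three elements, the stored delimiter2 must also be '.': ARRAY_TO_STRING then yields 'a.b.c', which STRING_TO_ARRAY splits back apart while mapping the element equal to 'b' to null. The shared test fixture is not shown on this page; the following is a hypothetical setup sketch consistent with this test and testStringToArrayFunction2 below. The column names come from the queries; the stored values are inferred from the expected arrays, not taken from the real IT.

// Hypothetical fixture sketch (not the actual IT setup). The values
// '.' for delimiter2 and 'a,b,c,d' / ',' / 'c' for string1 / delimiter1 /
// nullstring1 are inferred from the asserted results above and below.
private void createTestTable(Connection conn, String tableName) throws SQLException {
    conn.createStatement().execute("CREATE TABLE " + tableName
        + " (region_name VARCHAR PRIMARY KEY, string1 VARCHAR, delimiter1 VARCHAR,"
        + " nullstring1 VARCHAR, delimiter2 VARCHAR)");
    conn.createStatement().execute("UPSERT INTO " + tableName
        + " VALUES ('SF Bay Area', 'a,b,c,d', ',', 'c', '.')");
    conn.commit();
}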
Use of org.apache.phoenix.schema.types.PhoenixArray in project phoenix by apache.
From the class StringToArrayFunctionIT, method testStringToArrayFunction2.
@Test
public void testStringToArrayFunction2() throws Exception {
    Connection conn = DriverManager.getConnection(getUrl());
    ResultSet rs = conn.createStatement().executeQuery(
        "SELECT STRING_TO_ARRAY(string1, delimiter1, nullstring1) FROM " + tableName
            + " WHERE region_name = 'SF Bay Area'");
    assertTrue(rs.next());
    PhoenixArray expected = new PhoenixArray(PVarchar.INSTANCE, new Object[] { "a", "b", null, "d" });
    assertEquals(expected, rs.getArray(1));
    assertFalse(rs.next());
}
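Instead of comparing PhoenixArray instances, the returned java.sql.Array can also be unwrapped to a plain Java array. A minimal sketch continuing the test body above, assuming VARCHAR arrays materialize as Object[] (a detail not shown on this page):

Array array = rs.getArray(1);                     // backed by PhoenixArray
Object[] elements = (Object[]) array.getArray();  // null-string entries already mapped to null
assertArrayEquals(new Object[] { "a", "b", null, "d" }, elements);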
Use of org.apache.phoenix.schema.types.PhoenixArray in project phoenix by apache.
From the class StringToArrayFunctionIT, method testStringToArrayFunctionWithUpsertSelect2.
@Test
public void testStringToArrayFunctionWithUpsertSelect2() throws Exception {
    Connection conn = DriverManager.getConnection(getUrl());
    String sourceTable = generateUniqueName();
    String ddl = "CREATE TABLE " + sourceTable + " (region_name VARCHAR PRIMARY KEY, varchar VARCHAR)";
    conn.createStatement().execute(ddl);
    String targetTable = generateUniqueName();
    ddl = "CREATE TABLE " + targetTable + " (region_name VARCHAR PRIMARY KEY, varchars VARCHAR[])";
    conn.createStatement().execute(ddl);
    String dml = "UPSERT INTO " + sourceTable + "(region_name, varchar) VALUES('SF Bay Area', 'a,b,-,c,d')";
    conn.createStatement().execute(dml);
    dml = "UPSERT INTO " + sourceTable + "(region_name, varchar) VALUES('SF Bay Area2', '1,2,-,3,4')";
    conn.createStatement().execute(dml);
    conn.commit();
    dml = "UPSERT INTO " + targetTable + "(region_name, varchars) SELECT region_name, STRING_TO_ARRAY(varchar, ',', '-') FROM " + sourceTable;
    conn.createStatement().execute(dml);
    conn.commit();
    ResultSet rs = conn.createStatement().executeQuery("SELECT varchars FROM " + targetTable);
    assertTrue(rs.next());
    PhoenixArray expected = new PhoenixArray(PVarchar.INSTANCE, new Object[] { "a", "b", null, "c", "d" });
    assertEquals(expected, rs.getArray(1));
    assertTrue(rs.next());
    expected = new PhoenixArray(PVarchar.INSTANCE, new Object[] { "1", "2", null, "3", "4" });
    assertEquals(expected, rs.getArray(1));
    assertFalse(rs.next());
}
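The two assertions can rely on row order because rows come back in primary-key order ('SF Bay Area' before 'SF Bay Area2'). The UPSERT SELECT runs STRING_TO_ARRAY server-side; a hypothetical client-side equivalent (not part of the test) shows why the '-' entries arrive as SQL NULL elements:

// Hypothetical client-side equivalent of STRING_TO_ARRAY(value, delimiter, nullString):
// split on the literal delimiter, then map elements equal to the null string to null.
static String[] stringToArray(String value, String delimiter, String nullString) {
    String[] parts = value.split(java.util.regex.Pattern.quote(delimiter), -1);
    for (int i = 0; i < parts.length; i++) {
        if (parts[i].equals(nullString)) {
            parts[i] = null;
        }
    }
    return parts;
}
// stringToArray("a,b,-,c,d", ",", "-") -> { "a", "b", null, "c", "d" }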
Use of org.apache.phoenix.schema.types.PhoenixArray in project phoenix by apache.
From the class TypeUtilTest, method testTransformToTuple.
@Test
public void testTransformToTuple() throws Exception {
    PhoenixRecordWritable record = mock(PhoenixRecordWritable.class);
    Double[] doubleArr = new Double[] { 64.87, 89.96 };
    PhoenixArray arr = PArrayDataType.instantiatePhoenixArray(PDouble.INSTANCE, doubleArr);
    Map<String, Object> values = Maps.newLinkedHashMap();
    values.put("first", "213123");
    values.put("second", 1231123);
    values.put("third", 31231231232131L);
    values.put("four", "bytearray".getBytes());
    values.put("five", arr);
    when(record.getResultMap()).thenReturn(values);
    ResourceFieldSchema field = new ResourceFieldSchema().setType(DataType.CHARARRAY);
    ResourceFieldSchema field1 = new ResourceFieldSchema().setType(DataType.INTEGER);
    ResourceFieldSchema field2 = new ResourceFieldSchema().setType(DataType.LONG);
    ResourceFieldSchema field3 = new ResourceFieldSchema().setType(DataType.BYTEARRAY);
    ResourceFieldSchema field4 = new ResourceFieldSchema().setType(DataType.TUPLE);
    ResourceFieldSchema[] projectedColumns = { field, field1, field2, field3, field4 };
    Tuple t = TypeUtil.transformToTuple(record, projectedColumns);
    assertEquals(DataType.LONG, DataType.findType(t.get(2)));
    assertEquals(DataType.TUPLE, DataType.findType(t.get(4)));
    Tuple doubleArrayTuple = (Tuple) t.get(4);
    assertEquals(2, doubleArrayTuple.size());
    field = new ResourceFieldSchema().setType(DataType.BIGDECIMAL);
    field1 = new ResourceFieldSchema().setType(DataType.BIGINTEGER);
    values.clear();
    // Use the String constructor to avoid the imprecision of new BigDecimal(double).
    values.put("first", new BigDecimal("123123123.123213"));
    values.put("second", new BigInteger("1312313231312"));
    ResourceFieldSchema[] columns = { field, field1 };
    t = TypeUtil.transformToTuple(record, columns);
    assertEquals(DataType.BIGDECIMAL, DataType.findType(t.get(0)));
    assertEquals(DataType.BIGINTEGER, DataType.findType(t.get(1)));
}
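The test builds its array through PArrayDataType.instantiatePhoenixArray rather than the PhoenixArray constructor; the factory picks the subclass appropriate to the element type. A minimal standalone sketch of the same construction, using only the PhoenixArray methods that also appear in transformToTuple below (getDimensions, getElement):

import org.apache.phoenix.schema.types.PArrayDataType;
import org.apache.phoenix.schema.types.PDouble;
import org.apache.phoenix.schema.types.PhoenixArray;

public class PhoenixArrayExample {
    public static void main(String[] args) throws Exception {
        Double[] values = { 64.87, 89.96 };
        PhoenixArray arr = PArrayDataType.instantiatePhoenixArray(PDouble.INSTANCE, values);
        System.out.println(arr.getDimensions()); // 2: the number of elements
        System.out.println(arr.getElement(1));   // 89.96: element access is 0-based
    }
}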
Use of org.apache.phoenix.schema.types.PhoenixArray in project phoenix by apache.
From the class TypeUtil, method transformToTuple.
/**
 * Transforms a {@link PhoenixRecordWritable} into a Pig {@link Tuple}.
 *
 * @param record the Phoenix record to convert
 * @param projectedColumns the Pig field schemas for the projected columns; must match the record's column count
 * @return the converted tuple, or null if the record is empty or its column count does not match the projected schema
 * @throws IOException if a value cannot be converted to the requested Pig type
 */
public static Tuple transformToTuple(final PhoenixRecordWritable record, final ResourceFieldSchema[] projectedColumns) throws IOException {
    Map<String, Object> columnValues = record.getResultMap();
    if (columnValues == null || columnValues.size() == 0 || projectedColumns == null || projectedColumns.length != columnValues.size()) {
        return null;
    }
    int numColumns = columnValues.size();
    Tuple tuple = TUPLE_FACTORY.newTuple(numColumns);
    try {
        int i = 0;
        for (Map.Entry<String, Object> entry : columnValues.entrySet()) {
            final ResourceFieldSchema fieldSchema = projectedColumns[i];
            Object object = entry.getValue();
            if (object == null) {
                tuple.set(i++, null);
                continue;
            }
            switch (fieldSchema.getType()) {
                case DataType.BYTEARRAY:
                    byte[] bytes = PDataType.fromTypeId(PBinary.INSTANCE.getSqlType()).toBytes(object);
                    tuple.set(i, new DataByteArray(bytes, 0, bytes.length));
                    break;
                case DataType.CHARARRAY:
                    tuple.set(i, DataType.toString(object));
                    break;
                case DataType.DOUBLE:
                    tuple.set(i, DataType.toDouble(object));
                    break;
                case DataType.FLOAT:
                    tuple.set(i, DataType.toFloat(object));
                    break;
                case DataType.INTEGER:
                    tuple.set(i, DataType.toInteger(object));
                    break;
                case DataType.LONG:
                    tuple.set(i, DataType.toLong(object));
                    break;
                case DataType.BOOLEAN:
                    tuple.set(i, DataType.toBoolean(object));
                    break;
                case DataType.DATETIME:
                    // java.sql.Timestamp is converted via its epoch millis; other values
                    // go through Joda's generic converter.
                    if (object instanceof java.sql.Timestamp) {
                        tuple.set(i, new DateTime(((java.sql.Timestamp) object).getTime()));
                    } else {
                        tuple.set(i, new DateTime(object));
                    }
                    break;
                case DataType.BIGDECIMAL:
                    tuple.set(i, DataType.toBigDecimal(object));
                    break;
                case DataType.BIGINTEGER:
                    tuple.set(i, DataType.toBigInteger(object));
                    break;
                case DataType.TUPLE: {
                    // A Phoenix array is flattened into a nested Pig tuple, one field per element.
                    PhoenixArray array = (PhoenixArray) object;
                    Tuple t = TUPLE_FACTORY.newTuple(array.getDimensions());
                    for (int j = 0; j < array.getDimensions(); j++) {
                        t.set(j, array.getElement(j));
                    }
                    tuple.set(i, t);
                    break;
                }
                default:
                    throw new RuntimeException(String.format("Unsupported Pig type [%s]", fieldSchema));
            }
            i++;
        }
    } catch (Exception ex) {
        final String errorMsg = String.format("Error transforming PhoenixRecord to Tuple: %s", ex.getMessage());
        LOG.error(errorMsg);
        // Chain the original exception so the stack trace is not lost.
        throw new PigException(errorMsg, ex);
    }
    return tuple;
}
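Note the early-return contract: when the projected schema does not line up with the record's result map, the method returns null rather than throwing, so callers need a guard. A hedged usage sketch, assuming a populated PhoenixRecordWritable and schema array as in the test above:

// Sketch of caller-side handling; 'record' and 'projectedColumns' as in testTransformToTuple.
Tuple tuple = TypeUtil.transformToTuple(record, projectedColumns);
if (tuple == null) {
    // Schema/record mismatch or empty record: skip this row instead of failing later.
    return;
}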