Example use of org.apache.pig.data.Tuple in the Apache Phoenix project.
From class ReserveNSequenceTestIT, method doTest:
/**
 * Exercises the ReserveNSequence UDF against the sequence configured in {@code props}:
 * seeds the current value, invokes the UDF with (numToReserve, sequenceName, zkQuorum),
 * and validates the reserved range. If {@code props} declares an expected exception,
 * asserts the thrown type and message instead of propagating.
 *
 * @param conn  Phoenix connection used to seed and validate the sequence
 * @param props test parameters: current value, reservation count, expected failure info
 * @throws Exception any unexpected failure (rethrown when no exception was expected)
 */
private void doTest(Connection conn, UDFTestProperties props) throws Exception {
    setCurrentValue(conn, props.getCurrentValue());
    Tuple tuple = tupleFactory.newTuple(3);
    tuple.set(0, props.getNumToReserve());
    tuple.set(1, props.getSequenceName());
    tuple.set(2, zkQuorum);
    Long result = null;
    try {
        final String tenantId = conn.getClientInfo(PhoenixRuntime.TENANT_ID_ATTRIB);
        ReserveNSequence udf = new ReserveNSequence(zkQuorum, tenantId);
        try {
            result = udf.exec(tuple);
            validateReservedSequence(conn, props.getCurrentValue(), props.getNumToReserve(), result);
        } finally {
            // Always close the UDF's connection, even when exec/validate throws;
            // previously finish() was skipped on failure, leaking the connection.
            udf.finish();
        }
    } catch (Exception e) {
        if (props.isExceptionExpected()) {
            assertEquals(props.getExceptionClass(), e.getClass());
            // Previously the contains() result was discarded, so a wrong message
            // could never fail the test — assert it explicitly.
            assertEquals("Expected exception message to contain: " + props.getErrorMessage(),
                true, e.getMessage().contains(props.getErrorMessage()));
        } else {
            throw e;
        }
    }
}
Example use of org.apache.pig.data.Tuple in the Apache Phoenix project.
From class TypeUtilTest, method testTransformToTuple:
@Test
public void testTransformToTuple() throws Exception {
    // Mocked record whose result map drives TypeUtil.transformToTuple.
    PhoenixRecordWritable record = mock(PhoenixRecordWritable.class);
    Double[] doubleArr = new Double[2];
    doubleArr[0] = 64.87;
    doubleArr[1] = 89.96;
    PhoenixArray arr = PArrayDataType.instantiatePhoenixArray(PDouble.INSTANCE, doubleArr);
    Map<String, Object> values = Maps.newLinkedHashMap();
    values.put("first", "213123");
    values.put("second", 1231123);
    values.put("third", 31231231232131L);
    values.put("four", "bytearray".getBytes());
    values.put("five", arr);
    when(record.getResultMap()).thenReturn(values);
    // Projected schema mirrors the value types above, with the Phoenix array
    // expected to surface as a nested Pig tuple.
    ResourceFieldSchema field = new ResourceFieldSchema().setType(DataType.CHARARRAY);
    ResourceFieldSchema field1 = new ResourceFieldSchema().setType(DataType.INTEGER);
    ResourceFieldSchema field2 = new ResourceFieldSchema().setType(DataType.LONG);
    ResourceFieldSchema field3 = new ResourceFieldSchema().setType(DataType.BYTEARRAY);
    ResourceFieldSchema field4 = new ResourceFieldSchema().setType(DataType.TUPLE);
    ResourceFieldSchema[] projectedColumns = { field, field1, field2, field3, field4 };
    Tuple t = TypeUtil.transformToTuple(record, projectedColumns);
    assertEquals(DataType.LONG, DataType.findType(t.get(2)));
    assertEquals(DataType.TUPLE, DataType.findType(t.get(4)));
    Tuple doubleArrayTuple = (Tuple) t.get(4);
    assertEquals(2, doubleArrayTuple.size());
    // Second pass: BigDecimal / BigInteger mappings.
    field = new ResourceFieldSchema().setType(DataType.BIGDECIMAL);
    field1 = new ResourceFieldSchema().setType(DataType.BIGINTEGER);
    values.clear();
    // Use the String constructor: new BigDecimal(double) yields an inexact
    // binary-float value and is a well-known anti-pattern.
    values.put("first", new BigDecimal("123123123.123213"));
    values.put("second", new BigInteger("1312313231312"));
    ResourceFieldSchema[] columns = { field, field1 };
    t = TypeUtil.transformToTuple(record, columns);
    assertEquals(DataType.BIGDECIMAL, DataType.findType(t.get(0)));
    assertEquals(DataType.BIGINTEGER, DataType.findType(t.get(1)));
}
Example use of org.apache.pig.data.Tuple in the Apache Phoenix project.
From class PhoenixHBaseLoader, method getNext:
/**
 * Returns the next record from the underlying Phoenix reader as a Pig tuple,
 * or {@code null} once the input is exhausted (or a null record is produced).
 *
 * @throws IOException if the read is interrupted; the interrupt flag is restored
 *                     and the failure is surfaced as an {@link ExecException}.
 */
@Override
public Tuple getNext() throws IOException {
    try {
        if (!reader.nextKeyValue()) {
            return null;
        }
        final PhoenixRecordWritable current = reader.getCurrentValue();
        // A null record also signals end-of-input to Pig.
        return current == null ? null : TypeUtil.transformToTuple(current, schema.getFields());
    } catch (InterruptedException ie) {
        // Restore the interrupt status before translating to Pig's exception type.
        Thread.currentThread().interrupt();
        throw new ExecException("Error while reading input", 6018, PigException.REMOTE_ENVIRONMENT, ie);
    }
}
Example use of org.apache.pig.data.Tuple in Twitter's elephant-bird project.
From class TestJsonLoader, method testNullString:
/**
 * Verifies that {@link JsonLoader#parseStringToTuple(String)} tolerates an input
 * line whose entire content is the literal text "null" (a string, not a null
 * reference). {@link JSONParser#parse(java.io.Reader)} returns {@code null} for
 * such input, and the loader must translate that into a {@code null} tuple
 * rather than failing.
 */
@Test
public void testNullString() {
    JsonLoader loader = new JsonLoader();
    Tuple parsed = loader.parseStringToTuple("null");
    Assert.assertNull("Parsing line with contents 'null'", parsed);
}
Example use of org.apache.pig.data.Tuple in Twitter's elephant-bird project.
From class TestJsonLoader, method testPigScript:
/**
 * End-to-end Pig script test: writes three JSON lines with integer scores to a
 * temp file, loads them with JsonLoader, sums the scores via GROUP/SUM, and
 * asserts a single result tuple containing the expected total (60).
 */
@Test
public void testPigScript() throws IOException {
    File tempFile = File.createTempFile("json", null);
    tempFile.deleteOnExit();
    // try-with-resources: previously the writer leaked if any write() threw.
    try (FileWriter writer = new FileWriter(tempFile)) {
        writer.write("{\"score\": 10}\n");
        writer.write("{\"score\": 20}\n");
        writer.write("{\"score\": 30}\n");
    }
    PigServer pigServer = PigTestUtil.makePigServer();
    logAndRegisterQuery(pigServer, "data = load '" + tempFile.getAbsolutePath() + "' using com.twitter.elephantbird.pig.load.JsonLoader() as (json: map[]);");
    logAndRegisterQuery(pigServer, "a = foreach data generate (int) json#'score' as score;");
    logAndRegisterQuery(pigServer, "b = group a all;");
    logAndRegisterQuery(pigServer, "c = foreach b generate SUM(a.score) as total_score;");
    Iterator<Tuple> tuples = pigServer.openIterator("c");
    int count = 0;
    while (tuples.hasNext()) {
        Tuple t = tuples.next();
        // Expected sum of scores; Long.valueOf replaces the deprecated
        // boxing constructor new Long(60).
        Assert.assertEquals(Long.valueOf(60L), t.get(0));
        count++;
    }
    // Expect exactly one aggregate tuple.
    Assert.assertEquals(1, count);
}
Aggregations