Example use of org.apache.phoenix.flume.sink.PhoenixSink in project phoenix by apache:
class CsvEventSerializerIT, method testEventsWithHeaders.
@Test
public void testEventsWithHeaders() throws Exception {
    // Verifies that Flume event headers (host, source) configured via the
    // serializer's headers property are persisted as columns alongside the
    // CSV body fields.
    sinkContext = new Context();
    final String fullTableName = "FLUME_CSV_TEST";
    final String ddl = "CREATE TABLE IF NOT EXISTS " + fullTableName
            + " (rowkey VARCHAR not null, col1 varchar , col2 double, col3 varchar[], col4 integer[], host varchar , source varchar \n"
            + " CONSTRAINT pk PRIMARY KEY (rowkey))\n";
    String columns = "col1,col2,col3,col4";
    String rowkeyType = DefaultKeyGenerator.UUID.name();
    String headers = "host,source";
    initSinkContext(fullTableName, ddl, columns, null, null, null, null, rowkeyType, headers);

    sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);
    assertEquals(LifecycleState.IDLE, sink.getLifecycleState());

    final Channel channel = this.initChannel();
    sink.setChannel(channel);
    sink.start();

    int numEvents = 10;
    String col1 = "val1";
    String a1 = "\"aaa,bbb,ccc\"";
    String a2 = "\"1,2,3,4\"";
    String hostHeader = "host1";
    String sourceHeader = "source1";
    List<Event> eventList = Lists.newArrayListWithCapacity(numEvents);
    for (int i = 0; i < numEvents; i++) {
        // CSV body: col1<i>, i*10.5, quoted varchar array, quoted integer array.
        String eventBody = (col1 + i) + "," + i * 10.5 + "," + a1 + "," + a2;
        Map<String, String> headerMap = Maps.newHashMapWithExpectedSize(2);
        headerMap.put("host", hostHeader);
        headerMap.put("source", sourceHeader);
        eventList.add(EventBuilder.withBody(Bytes.toBytes(eventBody), headerMap));
    }

    // Put the events into the channel inside a single transaction.
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    for (Event event : eventList) {
        channel.put(event);
    }
    transaction.commit();
    transaction.close();

    sink.process();

    final String query = " SELECT * FROM \n " + fullTableName;
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    // try-with-resources replaces the manual try/finally null-check: the
    // connection (and with it the statement and result set) is closed even
    // when an assertion fails mid-block.
    try (Connection conn = DriverManager.getConnection(getUrl(), props);
            ResultSet rs = conn.createStatement().executeQuery(query)) {
        // Spot-check the header columns on the first two rows.
        assertTrue(rs.next());
        assertEquals("host1", rs.getString("host"));
        assertEquals("source1", rs.getString("source"));
        assertTrue(rs.next());
        assertEquals("host1", rs.getString("host"));
        assertEquals("source1", rs.getString("source"));
    }

    sink.stop();
    assertEquals(LifecycleState.STOP, sink.getLifecycleState());
    dropTable(fullTableName);
}
Example use of org.apache.phoenix.flume.sink.PhoenixSink in project phoenix by apache:
class CsvEventSerializerIT, method testMissingColumnsInEvent.
@Test
public void testMissingColumnsInEvent() throws EventDeliveryException, SQLException {
    // An event whose CSV body carries fewer fields than the configured columns
    // must not produce any row in the target table.
    final String tableName = "FLUME_CSV_TEST";
    initSinkContextWithDefaults(tableName);

    sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);
    assertEquals(LifecycleState.IDLE, sink.getLifecycleState());

    final Channel eventChannel = this.initChannel();
    sink.setChannel(eventChannel);
    sink.start();

    // Body contains only three fields; one configured column is missing.
    final Event malformedEvent =
            EventBuilder.withBody(Bytes.toBytes("kalyan,\"abc,pqr,xyz\",\"1,2,3,4\""));

    // Stage the event in the channel inside a transaction.
    final Transaction tx = eventChannel.getTransaction();
    tx.begin();
    eventChannel.put(malformedEvent);
    tx.commit();
    tx.close();

    sink.process();

    // Nothing should have been written for the incomplete event.
    assertEquals(0, countRows(tableName));

    sink.stop();
    assertEquals(LifecycleState.STOP, sink.getLifecycleState());
    dropTable(tableName);
}
Example use of org.apache.phoenix.flume.sink.PhoenixSink in project phoenix by apache:
class CsvEventSerializerIT, method testBatchEvents.
@Test
public void testBatchEvents() throws EventDeliveryException, SQLException {
    // Verifies that a batch of CSV events put into the channel all end up as
    // rows in the target table after a single sink.process() cycle.
    final String fullTableName = "FLUME_CSV_TEST";
    initSinkContextWithDefaults(fullTableName);

    sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);
    assertEquals(LifecycleState.IDLE, sink.getLifecycleState());

    final Channel channel = this.initChannel();
    sink.setChannel(channel);
    sink.start();

    int numEvents = 150;
    String col1 = "val1";
    String a1 = "\"aaa,bbb,ccc\"";
    String a2 = "\"1,2,3,4\"";
    List<Event> eventList = Lists.newArrayListWithCapacity(numEvents);
    // BUG FIX: the original loop condition was `i < eventList.size()`, but the
    // list starts empty, so no events were ever created and the final
    // assertEquals(eventList.size(), rowsInDb) passed vacuously as 0 == 0.
    // Iterate over the intended event count instead.
    for (int i = 0; i < numEvents; i++) {
        String eventBody = (col1 + i) + "," + i * 10.5 + "," + a1 + "," + a2;
        eventList.add(EventBuilder.withBody(Bytes.toBytes(eventBody)));
    }

    // Put the events into the channel inside a single transaction.
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    for (Event event : eventList) {
        channel.put(event);
    }
    transaction.commit();
    transaction.close();

    sink.process();

    int rowsInDb = countRows(fullTableName);
    assertEquals(eventList.size(), rowsInDb);

    sink.stop();
    assertEquals(LifecycleState.STOP, sink.getLifecycleState());
    dropTable(fullTableName);
}
Example use of org.apache.phoenix.flume.sink.PhoenixSink in project phoenix by apache:
class CsvEventSerializerIT, method testWithDefaultDelimiters.
@Test
public void testWithDefaultDelimiters() throws EventDeliveryException, SQLException {
    // A well-formed CSV event using the serializer's default delimiters should
    // produce exactly one row in the target table.
    final String tableName = "FLUME_CSV_TEST";
    final String ddl = "CREATE TABLE IF NOT EXISTS " + tableName
            + " (flume_time timestamp not null, col1 varchar , col2 double, col3 varchar[], col4 integer[]"
            + " CONSTRAINT pk PRIMARY KEY (flume_time))\n";
    final String columns = "col1,col2,col3,col4";
    final String rowkeyType = DefaultKeyGenerator.TIMESTAMP.name();
    initSinkContext(tableName, ddl, columns, null, null, null, null, rowkeyType, null);

    sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);
    assertEquals(LifecycleState.IDLE, sink.getLifecycleState());

    final Channel eventChannel = this.initChannel();
    sink.setChannel(eventChannel);
    sink.start();

    // Four comma-separated fields; array values are double-quoted.
    final Event csvEvent =
            EventBuilder.withBody(Bytes.toBytes("kalyan,10.5,\"abc,pqr,xyz\",\"1,2,3,4\""));

    // Stage the event in the channel inside a transaction.
    final Transaction tx = eventChannel.getTransaction();
    tx.begin();
    eventChannel.put(csvEvent);
    tx.commit();
    tx.close();

    sink.process();

    // Exactly one row is expected for the single event.
    assertEquals(1, countRows(tableName));

    sink.stop();
    assertEquals(LifecycleState.STOP, sink.getLifecycleState());
    dropTable(tableName);
}
Example use of org.apache.phoenix.flume.sink.PhoenixSink in project phoenix by apache:
class JsonEventSerializerIT, method testDifferentColumnNames.
@Test
public void testDifferentColumnNames() throws EventDeliveryException, SQLException {
    // JSON fields may be mapped onto differently-named table columns via the
    // columns-mapping configuration; a mapped event should land as one row.
    final String tableName = "FLUME_JSON_TEST";
    final String ddl = "CREATE TABLE IF NOT EXISTS " + tableName
            + " (flume_time timestamp not null, col1 varchar , col2 double, col3 varchar[], col4 integer[]"
            + " CONSTRAINT pk PRIMARY KEY (flume_time))\n";
    final String columns = "col1,col2,col3,col4";
    final String rowkeyType = DefaultKeyGenerator.TIMESTAMP.name();
    // JSON fields f2/f3 are mapped onto table columns col2/col3.
    final String columnsMapping = "{\"col1\":\"col1\",\"col2\":\"f2\",\"col3\":\"f3\",\"col4\":\"col4\"}";
    initSinkContext(tableName, ddl, columns, columnsMapping, rowkeyType, null);

    sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);
    assertEquals(LifecycleState.IDLE, sink.getLifecycleState());

    final Channel eventChannel = this.initChannel();
    sink.setChannel(eventChannel);
    sink.start();

    // Payload uses the mapped field names f2 and f3 instead of col2/col3.
    final Event jsonEvent = EventBuilder.withBody(Bytes.toBytes(
            "{\"col1\" : \"kalyan\", \"f2\" : 10.5, \"f3\" : [\"abc\",\"pqr\",\"xyz\"], \"col4\" : [1,2,3,4]}"));

    // Stage the event in the channel inside a transaction.
    final Transaction tx = eventChannel.getTransaction();
    tx.begin();
    eventChannel.put(jsonEvent);
    tx.commit();
    tx.close();

    sink.process();

    // Exactly one row is expected for the single event.
    assertEquals(1, countRows(tableName));

    sink.stop();
    assertEquals(LifecycleState.STOP, sink.getLifecycleState());
    dropTable(tableName);
}
Aggregations