Use of org.apache.flume.Channel in project phoenix by apache.
Class RegexEventSerializerIT, method testMissingColumnsInEvent.
@Test
public void testMissingColumnsInEvent() throws EventDeliveryException, SQLException {
    final String fullTableName = generateUniqueName();
    initSinkContextWithDefaults(fullTableName);

    sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);
    assertEquals(LifecycleState.IDLE, sink.getLifecycleState());

    final Channel channel = this.initChannel();
    sink.setChannel(channel);
    sink.start();

    final String eventBody = "val1";
    final Event event = EventBuilder.withBody(Bytes.toBytes(eventBody));
    // put event in channel
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    channel.put(event);
    transaction.commit();
    transaction.close();

    sink.process();

    int rowsInDb = countRows(fullTableName);
    assertEquals(0, rowsInDb);

    sink.stop();
    assertEquals(LifecycleState.STOP, sink.getLifecycleState());
}
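Whether this test sees zero rows or one comes down to the regular expression that initSinkContextWithDefaults() configures, and that helper is not reproduced on this page. Below is a hypothetical sketch of such a helper; only the FlumeConstants keys that also appear in testExtendedSerializer further down are taken from this page, while EventSerializers.REGEX, CONFIG_REGULAR_EXPRESSION, the regex itself, and the column names are assumptions about the phoenix-flume plugin.

private void initSinkContextWithDefaults(final String fullTableName) {
    // Hypothetical sketch; constant names flagged below are assumptions.
    sinkContext = new Context();
    sinkContext.put(FlumeConstants.CONFIG_TABLE, fullTableName);
    sinkContext.put(FlumeConstants.CONFIG_JDBC_URL, getUrl());
    // EventSerializers.REGEX is an assumed enum constant for the regex serializer.
    sinkContext.put(FlumeConstants.CONFIG_SERIALIZER, EventSerializers.REGEX.name());
    // Two tab-separated capture groups: an event body holding a single value
    // ("val1") cannot match, so nothing is upserted and countRows() returns 0.
    sinkContext.put(FlumeConstants.CONFIG_SERIALIZER_PREFIX + FlumeConstants.CONFIG_REGULAR_EXPRESSION,
        "([^\\t]+)\\t([^\\t]+)");
    sinkContext.put(FlumeConstants.CONFIG_SERIALIZER_PREFIX + FlumeConstants.CONFIG_COLUMN_NAMES,
        "col1,col2");
    sinkContext.put(FlumeConstants.CONFIG_SERIALIZER_PREFIX + FlumeConstants.CONFIG_ROWKEY_TYPE_GENERATOR,
        DefaultKeyGenerator.TIMESTAMP.name());
}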
Use of org.apache.flume.Channel in project phoenix by apache.
Class RegexEventSerializerIT, method testKeyGenerator.
@Test
public void testKeyGenerator() throws EventDeliveryException, SQLException {
    final String fullTableName = generateUniqueName();
    initSinkContextWithDefaults(fullTableName);

    sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);
    assertEquals(LifecycleState.IDLE, sink.getLifecycleState());

    final Channel channel = this.initChannel();
    sink.setChannel(channel);
    sink.start();

    final String eventBody = "val1\tval2";
    final Event event = EventBuilder.withBody(Bytes.toBytes(eventBody));
    // put event in channel
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    channel.put(event);
    transaction.commit();
    transaction.close();

    sink.process();

    int rowsInDb = countRows(fullTableName);
    assertEquals(1, rowsInDb);

    sink.stop();
    assertEquals(LifecycleState.STOP, sink.getLifecycleState());
}
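Every test on this page obtains its channel from a this.initChannel() helper that is not shown. A minimal sketch of what such a helper could look like, using Flume's org.apache.flume.channel.MemoryChannel; the capacity settings and channel name are illustrative assumptions rather than the Phoenix test suite's actual values.

private Channel initChannel() {
    // An in-memory channel is the usual choice for sink tests.
    Context channelContext = new Context();
    channelContext.put("capacity", "10000");            // illustrative values
    channelContext.put("transactionCapacity", "200");
    channelContext.put("keep-alive", "3");

    Channel channel = new MemoryChannel();
    channel.setName("memory-channel-" + System.currentTimeMillis());
    Configurables.configure(channel, channelContext);
    return channel;
}

Any Channel implementation would do, as long as it is configured through Configurables before the sink's setChannel()/start() calls.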
Use of org.apache.flume.Channel in project phoenix by apache.
Class JsonEventSerializerIT, method testKeyGenerator.
@Test
public void testKeyGenerator() throws EventDeliveryException, SQLException {
    final String fullTableName = "FLUME_JSON_TEST";
    initSinkContextWithDefaults(fullTableName);

    sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);
    assertEquals(LifecycleState.IDLE, sink.getLifecycleState());

    final Channel channel = this.initChannel();
    sink.setChannel(channel);
    sink.start();

    final String eventBody = "{\"col1\" : \"kalyan\", \"col2\" : 10.5, \"col3\" : [\"abc\",\"pqr\",\"xyz\"], \"col4\" : [1,2,3,4]}";
    final Event event = EventBuilder.withBody(Bytes.toBytes(eventBody));
    // put event in channel
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    channel.put(event);
    transaction.commit();
    transaction.close();

    sink.process();

    int rowsInDb = countRows(fullTableName);
    assertEquals(1, rowsInDb);

    sink.stop();
    assertEquals(LifecycleState.STOP, sink.getLifecycleState());

    dropTable(fullTableName);
}
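countRows(...) is another helper these tests call without defining. A plausible sketch, reusing the PropertiesUtil.deepCopy(TEST_PROPERTIES) connection pattern that testExtendedSerializer below uses; the exact query and resource handling here are assumptions.

private int countRows(final String fullTableName) throws SQLException {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    try (Connection conn = DriverManager.getConnection(getUrl(), props);
         ResultSet rs = conn.createStatement()
             .executeQuery("SELECT COUNT(*) FROM " + fullTableName)) {
        // Single-row result: the total number of rows the sink upserted.
        return rs.next() ? rs.getInt(1) : 0;
    }
}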
Use of org.apache.flume.Channel in project phoenix by apache.
Class PhoenixSinkIT, method testExtendedSerializer.
@Test
public void testExtendedSerializer() throws Exception {
    /*
     * Sadly, we can't mock a serializer, as the PhoenixSink does a Class.forName() to
     * instantiate it. Instead, we'll set up a Flume channel and verify the data our
     * custom serializer wrote.
     */
    final String fullTableName = "FLUME_TEST_EXTENDED";
    final String ddl = "CREATE TABLE " + fullTableName + " (ID BIGINT NOT NULL PRIMARY KEY, COUNTS UNSIGNED_LONG)";
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    final Connection conn = DriverManager.getConnection(getUrl(), props);
    conn.createStatement().execute(ddl);
    conn.commit();

    sinkContext = new Context();
    sinkContext.put(FlumeConstants.CONFIG_TABLE, "FLUME_TEST_EXTENDED");
    sinkContext.put(FlumeConstants.CONFIG_JDBC_URL, getUrl());
    sinkContext.put(FlumeConstants.CONFIG_SERIALIZER, CustomSerializer.class.getName());
    sinkContext.put(FlumeConstants.CONFIG_SERIALIZER_PREFIX + FlumeConstants.CONFIG_COLUMN_NAMES, "ID, COUNTS");
    sinkContext.put(FlumeConstants.CONFIG_SERIALIZER_PREFIX + FlumeConstants.CONFIG_ROWKEY_TYPE_GENERATOR, DefaultKeyGenerator.TIMESTAMP.name());

    PhoenixSink sink = new PhoenixSink();
    Configurables.configure(sink, sinkContext);

    // Send a test event through Flume, using our custom serializer
    final Channel channel = this.initChannel();
    sink.setChannel(channel);
    sink.start();

    final Transaction transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody(Bytes.toBytes("test event")));
    transaction.commit();
    transaction.close();

    sink.process();
    sink.stop();

    // Verify our serializer wrote out data
    ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM FLUME_TEST_EXTENDED");
    assertTrue(rs.next());
    assertTrue(rs.getLong(1) == 1L);
}
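The begin/put/commit/close sequence is repeated verbatim in every snippet on this page. A small utility (not part of the original tests) that wraps the pattern and adds the rollback the tests omit:

private static void putEvent(Channel channel, Event event) {
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    boolean committed = false;
    try {
        channel.put(event);
        transaction.commit();
        committed = true;
    } finally {
        // Roll back if the put or the commit failed, then always release the transaction.
        if (!committed) {
            transaction.rollback();
        }
        transaction.close();
    }
}

With such a helper, the block above reduces to putEvent(channel, EventBuilder.withBody(Bytes.toBytes("test event"))).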
Use of org.apache.flume.Channel in project ignite by apache.
Class IgniteSink, method process.
/**
 * Processes Flume events.
 */
@Override
public Status process() throws EventDeliveryException {
    Channel channel = getChannel();
    Transaction transaction = channel.getTransaction();
    int eventCount = 0;
    try {
        transaction.begin();
        List<Event> batch = new ArrayList<>(batchSize);
        for (; eventCount < batchSize; ++eventCount) {
            Event event = channel.take();
            if (event == null) {
                break;
            }
            batch.add(event);
        }
        if (!batch.isEmpty()) {
            ignite.cache(cacheName).putAll(eventTransformer.transform(batch));
            if (batch.size() < batchSize)
                sinkCounter.incrementBatchUnderflowCount();
            else
                sinkCounter.incrementBatchCompleteCount();
        } else {
            sinkCounter.incrementBatchEmptyCount();
        }
        sinkCounter.addToEventDrainAttemptCount(batch.size());
        transaction.commit();
        sinkCounter.addToEventDrainSuccessCount(batch.size());
    } catch (Exception e) {
        log.error("Failed to process events", e);
        try {
            transaction.rollback();
        } catch (Throwable e1) {
            e.addSuppressed(e1);
        }
        throw new EventDeliveryException(e);
    } finally {
        transaction.close();
    }
    return eventCount == 0 ? Status.BACKOFF : Status.READY;
}
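process() delegates the mapping from Flume events to Ignite cache entries to eventTransformer, whose implementation is not shown here. A sketch of one possible transformer; the EventTransformer&lt;Event, K, V&gt; shape is inferred from the transform(batch) call feeding IgniteCache.putAll(...) above, so treat the generic signature as an assumption about the ignite-flume module.

public class StringBodyTransformer implements EventTransformer<Event, Long, String> {
    /** Monotonically increasing key for incoming events. */
    private final AtomicLong counter = new AtomicLong();

    @Override
    public Map<Long, String> transform(List<Event> events) {
        Map<Long, String> entries = new HashMap<>(events.size());
        for (Event event : events) {
            // Store each event body as a UTF-8 string keyed by a sequence number.
            entries.put(counter.incrementAndGet(), new String(event.getBody(), StandardCharsets.UTF_8));
        }
        return entries;
    }
}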