Search in sources :

Example 1 with EventCodec

Use of org.apache.apex.malhar.flume.storage.EventCodec in the apex-malhar project by Apache.

Source: the FlumeSink class, method configure.

/* End implementing Flume Sink interface */
/* Begin Configurable Interface */
@Override
public void configure(Context context) {
    // Network identity of this sink: host/port it listens on and the id it advertises.
    hostname = context.getString(HOSTNAME_STRING, HOSTNAME_DEFAULT);
    port = context.getInteger("port", 0);
    id = context.getString("id");
    if (id == null) {
        // Fall back to the Flume-assigned component name when no explicit id is configured.
        id = getName();
    }
    // Flow-control and batching tunables; all have conservative defaults.
    acceptedTolerance = context.getLong("acceptedTolerance", ACCEPTED_TOLERANCE);
    sleepMillis = context.getLong("sleepMillis", 5L);
    // Configured as a whole-number percent; stored as a fraction (e.g. 5 -> 0.05).
    throughputAdjustmentFactor = context.getInteger("throughputAdjustmentPercent", 5) / 100.0;
    maximumEventsPerTransaction = context.getInteger("maximumEventsPerTransaction", 10000);
    minimumEventsPerTransaction = context.getInteger("minimumEventsPerTransaction", 100);
    commitEventTimeoutMillis = context.getLong("commitEventTimeoutMillis", Long.MAX_VALUE);
    @SuppressWarnings("unchecked") Discovery<byte[]> ldiscovery = configure("discovery", Discovery.class, context);
    if (ldiscovery == null) {
        logger.warn("Discovery agent not configured for the sink!");
        // No discovery service configured: install a no-op implementation that only
        // logs advertise/unadvertise calls and discovers nothing.
        discovery = new Discovery<byte[]>() {

            @Override
            public void unadvertise(Service<byte[]> service) {
                logger.debug("Sink {} stopped listening on {}:{}", service.getId(), service.getHost(), service.getPort());
            }

            @Override
            public void advertise(Service<byte[]> service) {
                logger.debug("Sink {} started listening on {}:{}", service.getId(), service.getHost(), service.getPort());
            }

            @Override
            public Collection<Service<byte[]>> discover() {
                // Type-safe empty set; replaces the raw Collections.EMPTY_SET, which
                // needed an @SuppressWarnings("unchecked") on this method.
                return Collections.emptySet();
            }
        };
    } else {
        discovery = ldiscovery;
    }
    storage = configure("storage", Storage.class, context);
    if (storage == null) {
        // Without configured storage the sink cannot buffer/replay events, so data
        // loss is possible; install a do-nothing Storage and warn loudly.
        logger.warn("storage key missing... FlumeSink may lose data!");
        storage = new Storage() {

            @Override
            public byte[] store(Slice slice) {
                return null;
            }

            @Override
            public byte[] retrieve(byte[] identifier) {
                return null;
            }

            @Override
            public byte[] retrieveNext() {
                return null;
            }

            @Override
            public void clean(byte[] identifier) {
            }

            @Override
            public void flush() {
            }
        };
    }
    // Codec used to serialize Flume events; defaults to EventCodec when unset.
    @SuppressWarnings("unchecked") StreamCodec<Event> lCodec = configure("codec", StreamCodec.class, context);
    if (lCodec == null) {
        codec = new EventCodec();
    } else {
        codec = lCodec;
    }
}
Also used : Storage(org.apache.apex.malhar.flume.storage.Storage) Slice(com.datatorrent.netlet.util.Slice) Collection(java.util.Collection) Event(org.apache.flume.Event) EventCodec(org.apache.apex.malhar.flume.storage.EventCodec)

Example 2 with EventCodec

Use of org.apache.apex.malhar.flume.storage.EventCodec in the apex-malhar project by Apache.

Source: the ApplicationTest class, method populateDAG.

@Override
public void populateDAG(DAG dag, Configuration conf) {
    // Run the test application with one-second streaming windows.
    dag.setAttribute(com.datatorrent.api.Context.DAGContext.STREAMING_WINDOW_SIZE_MILLIS, 1000);

    // Flume source: connects to a single sink endpoint and decodes events.
    FlumeInputOperator flumeInput = dag.addOperator("FlumeOperator", new FlumeInputOperator());
    String[] sinkAddresses = { "sink1:127.0.0.1:9098" };
    flumeInput.setConnectAddresses(sinkAddresses);
    flumeInput.setCodec(new EventCodec());

    // Downstream counter, co-located in the same container as the source.
    Counter sliceCounter = dag.addOperator("Counter", new Counter());
    dag.addStream("Slices", flumeInput.output, sliceCounter.input).setLocality(Locality.CONTAINER_LOCAL);
}
Also used : AbstractFlumeInputOperator(org.apache.apex.malhar.flume.operator.AbstractFlumeInputOperator) EventCodec(org.apache.apex.malhar.flume.storage.EventCodec)

Example 3 with EventCodec

Use of org.apache.apex.malhar.flume.storage.EventCodec in the apex-malhar project by Apache.

Source: the ApplicationDiscoveryTest class, method populateDAG.

@Override
public void populateDAG(DAG dag, Configuration conf) {
    // Run the test application with one-second streaming windows.
    dag.setAttribute(com.datatorrent.api.Context.DAGContext.STREAMING_WINDOW_SIZE_MILLIS, 1000);

    // Flume source discovers its sinks through ZooKeeper instead of a static address list.
    FlumeInputOperator flumeInput = dag.addOperator("FlumeOperator", new FlumeInputOperator());
    flumeInput.setCodec(new EventCodec());
    flumeInput.zkListener.setConnectionString("127.0.0.1:2181");
    flumeInput.zkListener.setBasePath("/flume/basepath");

    // Downstream counter, co-located in the same container as the source.
    Counter sliceCounter = dag.addOperator("Counter", new Counter());
    dag.addStream("Slices", flumeInput.output, sliceCounter.input).setLocality(Locality.CONTAINER_LOCAL);
}
Also used : AbstractFlumeInputOperator(org.apache.apex.malhar.flume.operator.AbstractFlumeInputOperator) EventCodec(org.apache.apex.malhar.flume.storage.EventCodec)

Aggregations

EventCodec (org.apache.apex.malhar.flume.storage.EventCodec)3 AbstractFlumeInputOperator (org.apache.apex.malhar.flume.operator.AbstractFlumeInputOperator)2 Slice (com.datatorrent.netlet.util.Slice)1 Collection (java.util.Collection)1 Storage (org.apache.apex.malhar.flume.storage.Storage)1 Event (org.apache.flume.Event)1