
Example 56 with StreamEvent

use of co.cask.cdap.api.flow.flowlet.StreamEvent in project cdap by caskdata.

the class StreamAdminTest method writeEvent.

// simply writes a static string to a stream
private void writeEvent(StreamId streamId) throws IOException {
    StreamConfig streamConfig = getStreamAdmin().getConfig(streamId);
    FileWriter<StreamEvent> streamEventFileWriter = getFileWriterFactory().create(streamConfig, 0);
    streamEventFileWriter.append(new StreamEvent(Charsets.UTF_8.encode("EVENT")));
}
Also used : StreamEvent(co.cask.cdap.api.flow.flowlet.StreamEvent)
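
For reference, a minimal sketch (not from the CDAP sources) of the same pattern that writes several events and then closes the writer; it reuses the getStreamAdmin() and getFileWriterFactory() test helpers shown above, and the method name writeStaticEvents is hypothetical:

// Hypothetical variant of writeEvent: appends several UTF-8 bodies and closes
// the FileWriter when done instead of leaving it open.
private void writeStaticEvents(StreamId streamId, int count) throws IOException {
    StreamConfig streamConfig = getStreamAdmin().getConfig(streamId);
    FileWriter<StreamEvent> writer = getFileWriterFactory().create(streamConfig, 0);
    try {
        for (int i = 0; i < count; i++) {
            writer.append(new StreamEvent(Charsets.UTF_8.encode("EVENT " + i)));
        }
        writer.flush();
    } finally {
        writer.close();
    }
}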

Example 57 with StreamEvent

use of co.cask.cdap.api.flow.flowlet.StreamEvent in project cdap by caskdata.

the class StreamConsumerTestBase method testFIFOReconfigure.

@Test
public void testFIFOReconfigure() throws Exception {
    String stream = "testReconfigure";
    StreamId streamId = TEST_NAMESPACE.stream(stream);
    StreamAdmin streamAdmin = getStreamAdmin();
    streamAdmin.create(streamId);
    StreamConfig streamConfig = streamAdmin.getConfig(streamId);
    // Writes 5 events
    writeEvents(streamConfig, "Testing ", 5);
    // Configure 3 consumers.
    streamAdmin.configureInstances(streamId, 0L, 3);
    StreamConsumerFactory consumerFactory = getConsumerFactory();
    // Starts three consumers
    List<StreamConsumer> consumers = Lists.newArrayList();
    for (int i = 0; i < 3; i++) {
        consumers.add(consumerFactory.create(streamId, "fifo.reconfigure", new ConsumerConfig(0L, i, 3, DequeueStrategy.FIFO, null)));
    }
    List<TransactionContext> txContexts = Lists.newArrayList();
    for (StreamConsumer consumer : consumers) {
        txContexts.add(createTxContext(consumer));
    }
    for (TransactionContext txContext : txContexts) {
        txContext.start();
    }
    // Consume an item from each consumer, but only commit the first consumer's transaction.
    for (int i = 0; i < consumers.size(); i++) {
        DequeueResult<StreamEvent> result = consumers.get(i).poll(1, 1, TimeUnit.SECONDS);
        Assert.assertEquals("Testing " + i, Charsets.UTF_8.decode(result.iterator().next().getBody()).toString());
        if (i == 0) {
            txContexts.get(i).finish();
        } else {
            txContexts.get(i).abort();
        }
    }
    for (StreamConsumer consumer : consumers) {
        consumer.close();
    }
    // Reconfigure to have two consumers.
    streamAdmin.configureInstances(streamId, 0L, 2);
    consumers.clear();
    for (int i = 0; i < 2; i++) {
        consumers.add(consumerFactory.create(streamId, "fifo.reconfigure", new ConsumerConfig(0L, i, 2, DequeueStrategy.FIFO, null)));
    }
    txContexts.clear();
    for (StreamConsumer consumer : consumers) {
        txContexts.add(createTxContext(consumer));
    }
    // Consume events from each consumer; between them they should see all four remaining events.
    Set<String> messages = Sets.newTreeSet();
    boolean done;
    do {
        for (TransactionContext txContext : txContexts) {
            txContext.start();
        }
        done = true;
        for (int i = 0; i < consumers.size(); i++) {
            DequeueResult<StreamEvent> result = consumers.get(i).poll(1, 1, TimeUnit.SECONDS);
            if (result.isEmpty()) {
                continue;
            }
            done = false;
            messages.add(Charsets.UTF_8.decode(result.iterator().next().getBody()).toString());
            txContexts.get(i).finish();
        }
    } while (!done);
    Assert.assertEquals(4, messages.size());
    int count = 1;
    for (String msg : messages) {
        Assert.assertEquals("Testing " + count, msg);
        count++;
    }
    for (StreamConsumer consumer : consumers) {
        consumer.close();
    }
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) StreamEvent(co.cask.cdap.api.flow.flowlet.StreamEvent) TransactionContext(org.apache.tephra.TransactionContext) ConsumerConfig(co.cask.cdap.data2.queue.ConsumerConfig) Test(org.junit.Test)
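
The test above relies on a createTxContext(...) helper that is not part of this listing. A rough sketch of what such a helper might look like with Tephra, assuming StreamConsumer implements Tephra's TransactionAware and the test base can hand out a TransactionSystemClient (the getTxClient() accessor is an assumption, not something shown here):

// Hypothetical helper: assuming StreamConsumer is TransactionAware, it can be
// handed directly to Tephra's TransactionContext so dequeues are transactional.
private TransactionContext createTxContext(StreamConsumer consumer) {
    TransactionSystemClient txClient = getTxClient(); // assumed accessor, not shown in the listing
    return new TransactionContext(txClient, consumer);
}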

Example 58 with StreamEvent

use of co.cask.cdap.api.flow.flowlet.StreamEvent in project cdap by caskdata.

the class StreamConsumerTestBase method testTTLMultipleEventsWithSameTimestamp.

@Test
public void testTTLMultipleEventsWithSameTimestamp() throws Exception {
    String stream = "testTTLMultipleEventsWithSameTimestamp";
    StreamId streamId = TEST_NAMESPACE.stream(stream);
    StreamAdmin streamAdmin = getStreamAdmin();
    // Create stream with ttl of 1 day
    final long ttl = TimeUnit.DAYS.toMillis(1);
    final long currentTime = System.currentTimeMillis();
    final long increment = TimeUnit.SECONDS.toMillis(1);
    final long approxEarliestNonExpiredTime = currentTime - TimeUnit.HOURS.toMillis(1);
    Properties streamProperties = new Properties();
    streamProperties.setProperty(Constants.Stream.TTL, Long.toString(ttl));
    streamProperties.setProperty(Constants.Stream.PARTITION_DURATION, Long.toString(ttl));
    streamAdmin.create(streamId, streamProperties);
    StreamConfig streamConfig = streamAdmin.getConfig(streamId);
    streamAdmin.configureInstances(streamId, 0L, 1);
    StreamConsumerFactory consumerFactory = getConsumerFactory();
    Assert.assertEquals(ttl, streamConfig.getTTL());
    Assert.assertEquals(ttl, streamConfig.getPartitionDuration());
    // Write 10 expired events to the stream, all with timestamp 0 (well past the TTL)
    writeEvents(streamConfig, "Old event ", 10, new ConstantClock(0));
    // Write 40 non-expired events (20 before and 20 after a flush) with timestamps starting at approxEarliestNonExpiredTime
    Set<StreamEvent> expectedEvents = Sets.newTreeSet(STREAM_EVENT_COMPARATOR);
    FileWriter<StreamEvent> writer = getFileWriterFactory().create(streamConfig, 0);
    try {
        expectedEvents.addAll(writeEvents(writer, "New event pre-flush ", 20, new IncrementingClock(approxEarliestNonExpiredTime, increment, 5)));
        writer.flush();
        expectedEvents.addAll(writeEvents(writer, "New event post-flush ", 20, new IncrementingClock(approxEarliestNonExpiredTime + 1, increment, 5)));
    } finally {
        writer.close();
    }
    StreamConsumer consumer = consumerFactory.create(streamId, stream, new ConsumerConfig(0L, 0, 1, DequeueStrategy.FIFO, null));
    verifyEvents(consumer, expectedEvents);
    TransactionContext txContext = createTxContext(consumer);
    txContext.start();
    try {
        // Should be no more pending events
        DequeueResult<StreamEvent> result = consumer.poll(1, 1, TimeUnit.SECONDS);
        Assert.assertTrue(result.isEmpty());
    } finally {
        txContext.finish();
    }
    consumer.close();
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) StreamEvent(co.cask.cdap.api.flow.flowlet.StreamEvent) Properties(java.util.Properties) TransactionContext(org.apache.tephra.TransactionContext) ConsumerConfig(co.cask.cdap.data2.queue.ConsumerConfig) Test(org.junit.Test)
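
ConstantClock and IncrementingClock are test clocks that this listing does not include. Below are hypothetical sketches consistent with how the tests use them; the now() method name and the exact contract are assumptions, and the real helpers in the CDAP test sources may differ:

// ConstantClock: always reports the same timestamp, used here to produce
// events that are all expired (timestamp 0).
static final class ConstantClock {
    private final long time;
    ConstantClock(long time) {
        this.time = time;
    }
    long now() {
        return time;
    }
}

// IncrementingClock: repeats each timestamp 'repeat' times before advancing by
// 'increment' millis, which is how several events can share one timestamp.
static final class IncrementingClock {
    private final long increment;
    private final int repeat;
    private long current;
    private int served;

    IncrementingClock(long start, long increment) {
        this(start, increment, 1);
    }

    IncrementingClock(long start, long increment, int repeat) {
        this.current = start;
        this.increment = increment;
        this.repeat = repeat;
    }

    long now() {
        long result = current;
        if (++served >= repeat) {
            served = 0;
            current += increment;
        }
        return result;
    }
}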

Example 59 with StreamEvent

use of co.cask.cdap.api.flow.flowlet.StreamEvent in project cdap by caskdata.

the class StreamConsumerTestBase method testTTL.

@Test
public void testTTL() throws Exception {
    String stream = "testTTL";
    StreamId streamId = TEST_NAMESPACE.stream(stream);
    StreamAdmin streamAdmin = getStreamAdmin();
    // Create stream with ttl of 1 day
    final long ttl = TimeUnit.DAYS.toMillis(1);
    final long currentTime = System.currentTimeMillis();
    final long increment = TimeUnit.SECONDS.toMillis(1);
    final long approxEarliestNonExpiredTime = currentTime - TimeUnit.HOURS.toMillis(1);
    Properties streamProperties = new Properties();
    streamProperties.setProperty(Constants.Stream.TTL, Long.toString(ttl));
    streamProperties.setProperty(Constants.Stream.PARTITION_DURATION, Long.toString(ttl));
    streamAdmin.create(streamId, streamProperties);
    StreamConfig streamConfig = streamAdmin.getConfig(streamId);
    streamAdmin.configureInstances(streamId, 0L, 1);
    StreamConsumerFactory consumerFactory = getConsumerFactory();
    Assert.assertEquals(ttl, streamConfig.getTTL());
    Assert.assertEquals(ttl, streamConfig.getPartitionDuration());
    Set<StreamEvent> expectedEvents = Sets.newTreeSet(STREAM_EVENT_COMPARATOR);
    FileWriter<StreamEvent> writer = getFileWriterFactory().create(streamConfig, 0);
    try {
        // Write 20 expired events (timestamps starting at 0, well past the TTL)
        writeEvents(streamConfig, "Old event ", 20, new IncrementingClock(0, 1));
        // Write 12 non-expired events with timestamps starting at approxEarliestNonExpiredTime
        expectedEvents.addAll(writeEvents(streamConfig, "New event ", 12, new IncrementingClock(approxEarliestNonExpiredTime, increment)));
    } finally {
        writer.close();
    }
    // Dequeue from the stream. Only the 12 non-expired events should be returned.
    StreamConsumer consumer = consumerFactory.create(streamId, stream, new ConsumerConfig(0L, 0, 1, DequeueStrategy.FIFO, null));
    try {
        verifyEvents(consumer, expectedEvents);
        TransactionContext txContext = createTxContext(consumer);
        txContext.start();
        try {
            // Should be no more pending events
            DequeueResult<StreamEvent> result = consumer.poll(1, 2, TimeUnit.SECONDS);
            Assert.assertTrue(result.isEmpty());
        } finally {
            txContext.finish();
        }
    } finally {
        consumer.close();
    }
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) StreamEvent(co.cask.cdap.api.flow.flowlet.StreamEvent) Properties(java.util.Properties) TransactionContext(org.apache.tephra.TransactionContext) ConsumerConfig(co.cask.cdap.data2.queue.ConsumerConfig) Test(org.junit.Test)
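
verifyEvents(...) is another StreamConsumerTestBase helper that the listing omits. A hedged sketch of a helper with the behavior the tests rely on (the real implementation may poll in a loop and differ in details):

// Hypothetical verifyEvents: poll inside a transaction and assert that the
// dequeued bodies match the expected events, in order. A single poll is
// assumed to be sufficient for these test sizes.
private void verifyEvents(StreamConsumer consumer, Collection<StreamEvent> expectedEvents) throws Exception {
    TransactionContext txContext = createTxContext(consumer);
    txContext.start();
    try {
        DequeueResult<StreamEvent> result = consumer.poll(expectedEvents.size(), 1, TimeUnit.SECONDS);
        Iterator<StreamEvent> expected = expectedEvents.iterator();
        for (StreamEvent event : result) {
            Assert.assertTrue(expected.hasNext());
            Assert.assertEquals(Charsets.UTF_8.decode(expected.next().getBody()).toString(),
                                Charsets.UTF_8.decode(event.getBody()).toString());
        }
        Assert.assertFalse(expected.hasNext());
    } finally {
        txContext.finish();
    }
}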

Example 60 with StreamEvent

use of co.cask.cdap.api.flow.flowlet.StreamEvent in project cdap by caskdata.

the class AbstractStreamInputFormat method createStreamEventDecoder.

@SuppressWarnings("unchecked")
protected StreamEventDecoder<K, V> createStreamEventDecoder(Configuration conf) {
    Class<? extends StreamEventDecoder> decoderClass = getDecoderClass(conf);
    Preconditions.checkNotNull(decoderClass, "Failed to load stream event decoder %s", conf.get(DECODER_TYPE));
    try {
        // If the configured decoder is the format-based decoder, build a RecordFormat to format the stream body.
        if (decoderClass.isAssignableFrom(FormatStreamEventDecoder.class)) {
            try {
                RecordFormat<StreamEvent, V> bodyFormat = getInitializedFormat(conf);
                return (StreamEventDecoder<K, V>) new FormatStreamEventDecoder(bodyFormat);
            } catch (Exception e) {
                throw new IllegalArgumentException("Unable to get the stream body format.");
            }
        } else {
            return (StreamEventDecoder<K, V>) decoderClass.newInstance();
        }
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
Also used : IdentityStreamEventDecoder(co.cask.cdap.data.stream.decoder.IdentityStreamEventDecoder) StringStreamEventDecoder(co.cask.cdap.data.stream.decoder.StringStreamEventDecoder) StreamEventDecoder(co.cask.cdap.api.stream.StreamEventDecoder) BytesStreamEventDecoder(co.cask.cdap.data.stream.decoder.BytesStreamEventDecoder) TextStreamEventDecoder(co.cask.cdap.data.stream.decoder.TextStreamEventDecoder) FormatStreamEventDecoder(co.cask.cdap.data.stream.decoder.FormatStreamEventDecoder) StreamEvent(co.cask.cdap.api.flow.flowlet.StreamEvent) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) IOException(java.io.IOException) FormatStreamEventDecoder(co.cask.cdap.data.stream.decoder.FormatStreamEventDecoder)
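
getDecoderClass(conf) is referenced above but not shown. A rough sketch of what it might look like, assuming DECODER_TYPE holds the decoder's fully qualified class name; the real method in AbstractStreamInputFormat may differ (for example in class-loader handling):

// Hypothetical lookup: resolve the configured decoder class, or return null so
// that the Preconditions check in createStreamEventDecoder fails with a clear message.
@SuppressWarnings("unchecked")
private Class<? extends StreamEventDecoder> getDecoderClass(Configuration conf) {
    String decoderType = conf.get(DECODER_TYPE);
    if (decoderType == null) {
        return null;
    }
    try {
        return (Class<? extends StreamEventDecoder>) conf.getClassByName(decoderType);
    } catch (ClassNotFoundException e) {
        return null;
    }
}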

Aggregations

StreamEvent (co.cask.cdap.api.flow.flowlet.StreamEvent): 84 usages
Test (org.junit.Test): 65 usages
Location (org.apache.twill.filesystem.Location): 27 usages
StreamId (co.cask.cdap.proto.id.StreamId): 24 usages
StructuredRecord (co.cask.cdap.api.data.format.StructuredRecord): 19 usages
FormatSpecification (co.cask.cdap.api.data.format.FormatSpecification): 17 usages
Schema (co.cask.cdap.api.data.schema.Schema): 10 usages
IOException (java.io.IOException): 9 usages
StreamConfig (co.cask.cdap.data2.transaction.stream.StreamConfig): 8 usages
ByteBuffer (java.nio.ByteBuffer): 8 usages
ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig): 7 usages
StreamAdmin (co.cask.cdap.data2.transaction.stream.StreamAdmin): 6 usages
TransactionContext (org.apache.tephra.TransactionContext): 6 usages
BinaryDecoder (co.cask.cdap.common.io.BinaryDecoder): 5 usages
TypeToken (com.google.common.reflect.TypeToken): 5 usages
StreamEventCodec (co.cask.cdap.common.stream.StreamEventCodec): 4 usages
IdentityStreamEventDecoder (co.cask.cdap.data.stream.decoder.IdentityStreamEventDecoder): 4 usages
File (java.io.File): 4 usages
SchemaHash (co.cask.cdap.api.data.schema.SchemaHash): 3 usages
QueueName (co.cask.cdap.common.queue.QueueName): 3 usages