
Example 16 with DbusEvent

Use of com.linkedin.databus.core.DbusEvent in project databus by linkedin.

From class TestUnifiedClientStats, method testBasicHistogramMetrics.

/**
   * Tests the basic (non-aggregated) functionality of the histogram/percentile metrics
   * (timeLagSourceToReceiptMs and timeLagConsumerCallbacksMs).
   */
@Test
public void testBasicHistogramMetrics() {
    // (1) create stats object
    UnifiedClientStats unifiedClientStats = new UnifiedClientStats(3 /* ownerId */, "stats_name", "stats_dim");
    for (int i = 0; i < 200; ++i) {
        // Without the ability to override System.currentTimeMillis() (or hacking UnifiedClientStats to use an
        // overridable method to provide the time, and then overriding it here), there's a small chance that
        // our System.currentTimeMillis() call and that in registerDataEventReceived() will return values that
        // differ by a non-constant amount (i.e., jitter).  But we can manage that with inequalities in our
        // assertions.
        // Expected histogram values for timeLagSourceToReceiptMs range from 0 to 1990 ms (approximately).
        long sourceTimestampNs = (System.currentTimeMillis() - 10 * i) * DbusConstants.NUM_NSECS_IN_MSEC;
        // We have perfect control over the values for timeLagConsumerCallbacksMs.  Make calculations trivial:
        // histogram values will be 0 through 199 ms (exactly).
        long callbackTimeElapsedNs = (long) i * DbusConstants.NUM_NSECS_IN_MSEC;
        // (2) create 200 fake DbusEvents
        DbusEvent dbusEvent = createEvent(sourceTimestampNs);
        // (3) call registerDataEventReceived() and registerCallbacksProcessed() for each event
        // (normally there are more of the latter since there are more callback types than just onDataEvent(),
        // but it doesn't really matter, and it simplifies things if we keep a fixed ratio--here just 1:1)
        unifiedClientStats.registerDataEventReceived(dbusEvent);
        unifiedClientStats.registerCallbacksProcessed(callbackTimeElapsedNs);
    }
    // (4) verify histogram values are as expected
    // Both metrics-core and Apache Commons Math use the "R-6" quantile-estimation method, as described
    // at http://en.wikipedia.org/wiki/Quantile .
    //
    // N = 200
    // p = 0.5, 0.9, 0.95, 0.99
    // h = (N+1)*p = 100.5, 180.9, 190.95, 198.99
    //
    // Q[50th]  =  x[100-1] + (100.5  - 100)*(x[100-1+1] - x[100-1])  =   99.0 + 0.5 *(100.0 -  99.0)  =   99.5
    // Q[90th]  =  x[180-1] + (180.9  - 180)*(x[180-1+1] - x[180-1])  =  179.0 + 0.9 *(180.0 - 179.0)  =  179.9
    // Q[95th]  =  x[190-1] + (190.95 - 190)*(x[190-1+1] - x[190-1])  =  189.0 + 0.95*(190.0 - 189.0)  =  189.95
    // Q[99th]  =  x[198-1] + (198.99 - 198)*(x[198-1+1] - x[198-1])  =  197.0 + 0.99*(198.0 - 197.0)  =  197.99
    assertEquals("unexpected timeLagConsumerCallbacksMs 50th percentile", 99.5, unifiedClientStats.getTimeLagConsumerCallbacksMs_HistPct_50());
    assertEquals("unexpected timeLagConsumerCallbacksMs 90th percentile", 179.9, unifiedClientStats.getTimeLagConsumerCallbacksMs_HistPct_90());
    assertEquals("unexpected timeLagConsumerCallbacksMs 95th percentile", 189.95, unifiedClientStats.getTimeLagConsumerCallbacksMs_HistPct_95());
    assertEquals("unexpected timeLagConsumerCallbacksMs 99th percentile", 197.99, unifiedClientStats.getTimeLagConsumerCallbacksMs_HistPct_99());
    assertEquals("unexpected timeLagConsumerCallbacksMs max value", 199.0, unifiedClientStats.getTimeLagConsumerCallbacksMs_Max());
    // See sourceTimestampNs comment above.  Approximately:
    // Q[50th]  =  x[100-1] + (100.5  - 100)*(x[100-1+1] - x[100-1])  =   990.0 + 0.5 *(1000.0 -  990.0)  =   995.0
    // Q[90th]  =  x[180-1] + (180.9  - 180)*(x[180-1+1] - x[180-1])  =  1790.0 + 0.9 *(1800.0 - 1790.0)  =  1799.0
    // Q[95th]  =  x[190-1] + (190.95 - 190)*(x[190-1+1] - x[190-1])  =  1890.0 + 0.95*(1900.0 - 1890.0)  =  1899.5
    // Q[99th]  =  x[198-1] + (198.99 - 198)*(x[198-1+1] - x[198-1])  =  1970.0 + 0.99*(1980.0 - 1970.0)  =  1979.9
    // ...but allow +/-1 for jitter
    double percentile = unifiedClientStats.getTimeLagSourceToReceiptMs_HistPct_50();
    assertTrue("unexpected timeLagSourceToReceiptMs 50th percentile: " + percentile, // nominal value is 995.0
    994.0 <= percentile && percentile <= 996.0);
    percentile = unifiedClientStats.getTimeLagSourceToReceiptMs_HistPct_90();
    assertTrue("unexpected timeLagSourceToReceiptMs 90th percentile: " + percentile, // nominal value is 1799.0
    1798.0 <= percentile && percentile <= 1800.0);
    percentile = unifiedClientStats.getTimeLagSourceToReceiptMs_HistPct_95();
    assertTrue("unexpected timeLagSourceToReceiptMs 95th percentile: " + percentile, // nominal value is 1899.5, but saw 1900.45 once
    1898.5 <= percentile && percentile <= 1900.5);
    percentile = unifiedClientStats.getTimeLagSourceToReceiptMs_HistPct_99();
    assertTrue("unexpected timeLagSourceToReceiptMs 99th percentile: " + percentile, // nominal value is 1979.9
    1978.9 <= percentile && percentile <= 1980.9);
}
Also used : UnifiedClientStats(com.linkedin.databus.client.pub.mbean.UnifiedClientStats) DbusEvent(com.linkedin.databus.core.DbusEvent) Test(org.testng.annotations.Test)
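
The comment block above walks through the R-6 estimator by hand. The following standalone sketch (mine, not part of the databus sources; the class and method names are invented for illustration) reproduces the same arithmetic, so the expected values 99.5, 179.9, 189.95 and 197.99 can be checked outside the test:

public final class R6QuantileSketch {
    // R-6 quantile estimator (the method metrics-core and Commons Math use, per the comments above).
    // 'sorted' must be in ascending order and non-empty; p must lie in (0, 1).
    static double r6Quantile(double[] sorted, double p) {
        int n = sorted.length;
        double h = (n + 1) * p;                   // 1-based fractional rank
        if (h <= 1.0) return sorted[0];
        if (h >= n)   return sorted[n - 1];
        int k = (int) Math.floor(h);              // integer part of the rank
        return sorted[k - 1] + (h - k) * (sorted[k] - sorted[k - 1]);
    }

    public static void main(String[] args) {
        double[] values = new double[200];        // 0.0 .. 199.0, mirroring the timeLagConsumerCallbacksMs samples
        for (int i = 0; i < values.length; ++i) values[i] = i;
        for (double p : new double[] { 0.50, 0.90, 0.95, 0.99 }) {
            System.out.println(p + " -> " + r6Quantile(values, p));
        }
        // Prints (up to floating-point rounding): 99.5, 179.9, 189.95, 197.99
    }
}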

Example 17 with DbusEvent

Use of com.linkedin.databus.core.DbusEvent in project databus by linkedin.

From class TestInternalMetadata, method testGetMetadata_UnhappyPath_MissingSchema.

/**
   * Verifies that getMetadata() throws an exception if the metadata schema specified
   * in the event header is unavailable.
   */
@Test
public void testGetMetadata_UnhappyPath_MissingSchema() throws Exception {
    LOG.info("starting testGetMetadata_UnhappyPath_MissingSchema()");
    // build the event's metadata and then the event
    DbusEventPart metadataPart = createMetadataPart();
    DbusEvent event = createEvent(metadataPart);
    // create an empty metadata schema set
    VersionedSchemaSet metadataSchemaSet = new VersionedSchemaSet();
    // now create the decoder and attempt to use it to extract and decode the event's metadata
    DbusEventAvroDecoder eventDecoder = createDecoder(metadataSchemaSet);
    try {
        GenericRecord reuse = null;
        GenericRecord decodedMetadata = eventDecoder.getMetadata(event, reuse);
        Assert.fail("getMetadata() should have thrown exception");
    } catch (Exception ex) {
        // expected case: event had metadata, but schema to decode it was missing
    }
    LOG.info("leaving testGetMetadata_UnhappyPath_MissingSchema()");
}
Also used : DbusEventPart(com.linkedin.databus.core.DbusEventPart) DbusEvent(com.linkedin.databus.core.DbusEvent) DbusEventAvroDecoder(com.linkedin.databus.client.DbusEventAvroDecoder) VersionedSchemaSet(com.linkedin.databus2.schemas.VersionedSchemaSet) GenericRecord(org.apache.avro.generic.GenericRecord) Test(org.testng.annotations.Test)
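
A sketch of the caller-side pattern this behavior implies (illustrative only, reusing the decoder and event names from the test above): because a metadata schema that is missing from the decoder's VersionedSchemaSet surfaces as an exception rather than a null return, callers that can tolerate absent metadata should guard the call.

    GenericRecord metadata = null;
    try {
        metadata = eventDecoder.getMetadata(event, null);
    } catch (Exception e) {
        // metadata schema not registered with the decoder: continue processing the event without metadata
    }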

Example 18 with DbusEvent

Use of com.linkedin.databus.core.DbusEvent in project databus by linkedin.

From class TestInternalMetadata, method testGetMetadata_UnhappyPath_BadSchema.

/**
   * Verifies that getMetadata() returns null if there's a mismatch between the event's metadata
   * and the metadata schema whose signature/checksum is specified in the event header.
   */
@Test
public void testGetMetadata_UnhappyPath_BadSchema() throws Exception {
    LOG.info("starting testGetMetadata_UnhappyPath_BadSchema()");
    // build the event's metadata and then the event
    DbusEventPart metadataPart = createMetadataPart();
    DbusEvent event = createEvent(metadataPart);
    // create a metadata schema set with a schema that claims to match the event's
    // metadata but doesn't actually
    VersionedSchemaSet metadataSchemaSet = new VersionedSchemaSet();
    metadataSchemaSet.add(SchemaRegistryService.DEFAULT_METADATA_SCHEMA_SOURCE,
                          metadataPart.getSchemaVersion(),              // METADATA_SCHEMA_VERSION
                          new SchemaId(metadataPart.getSchemaDigest()), // METADATA_SCHEMA_CHECKSUM
                          INCORRECT_METADATA_SCHEMA,
                          true);                                        // preserve original string
    // now create the decoder and attempt to use it to extract and decode the event's metadata
    DbusEventAvroDecoder eventDecoder = createDecoder(metadataSchemaSet);
    try {
        GenericRecord reuse = null;
        GenericRecord decodedMetadata = eventDecoder.getMetadata(event, reuse);
        Assert.assertNull(decodedMetadata, "getMetadata() should have returned null;");
    } catch (Exception ex) {
        Assert.fail("getMetadata() should not have thrown exception: " + ex);
    }
    LOG.info("leaving testGetMetadata_UnhappyPath_BadSchema()");
}
Also used : DbusEventPart(com.linkedin.databus.core.DbusEventPart) DbusEvent(com.linkedin.databus.core.DbusEvent) DbusEventAvroDecoder(com.linkedin.databus.client.DbusEventAvroDecoder) SchemaId(com.linkedin.databus2.schemas.SchemaId) VersionedSchemaSet(com.linkedin.databus2.schemas.VersionedSchemaSet) GenericRecord(org.apache.avro.generic.GenericRecord) Test(org.testng.annotations.Test)
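
Note the contrast with the previous example: a schema that is missing altogether surfaces as an exception, while a schema that is registered but does not match the serialized metadata surfaces as a null return. A minimal caller-side sketch (illustrative, reusing the names from the test above):

    GenericRecord metadata = eventDecoder.getMetadata(event, null);
    if (metadata == null) {
        // registered schema did not match the event's metadata payload: skip metadata-driven handling
    }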

Example 19 with DbusEvent

Use of com.linkedin.databus.core.DbusEvent in project databus by linkedin.

From class TestInternalMetadata, method testGetMetadata_HappyPath.

/**
   * Verifies that getMetadata() returns the expected GenericRecord for the event's
   * metadata and that it has the expected fields and values in it.
   */
@Test
public void testGetMetadata_HappyPath() throws Exception {
    LOG.info("starting testGetMetadata_HappyPath()");
    // build the event's metadata and then the event
    DbusEventPart metadataPart = createMetadataPart();
    DbusEvent event = createEvent(metadataPart);
    // create a metadata schema set that correctly corresponds to the metadata
    VersionedSchemaSet metadataSchemaSet = new VersionedSchemaSet();
    metadataSchemaSet.add(SchemaRegistryService.DEFAULT_METADATA_SCHEMA_SOURCE,
                          metadataPart.getSchemaVersion(),              // METADATA_SCHEMA_VERSION
                          new SchemaId(metadataPart.getSchemaDigest()), // METADATA_SCHEMA_CHECKSUM
                          CORRECT_METADATA_SCHEMA,
                          true);                                        // preserve original string
    // now create the decoder and use it to extract and decode the event's metadata
    DbusEventAvroDecoder eventDecoder = createDecoder(metadataSchemaSet);
    try {
        GenericRecord reuse = null;
        GenericRecord decodedMetadata = eventDecoder.getMetadata(event, reuse);
        Assert.assertNotNull(decodedMetadata, "getMetadata() returned null GenericRecord;");
        Utf8 etag = (Utf8) decodedMetadata.get("etag");
        Assert.assertEquals(etag.toString(), "dunno what an etag is");
        Integer flags = (Integer) decodedMetadata.get("flags");
        Assert.assertEquals(flags, null, "expected flags to be null");
        Long expires = (Long) decodedMetadata.get("expires");
        Assert.assertNotNull(expires, "expected expires to have a value;");
        Assert.assertEquals(expires.longValue(), 1366150681);
        Utf8 nonexistentField = (Utf8) decodedMetadata.get("nonexistentField");
        Assert.assertNull(nonexistentField, "unexpected value for 'nonexistentField';");
    } catch (Exception ex) {
        Assert.fail("unexpected error decoding metadata: " + ex);
    }
    LOG.info("leaving testGetMetadata_HappyPath()");
}
Also used : DbusEventPart(com.linkedin.databus.core.DbusEventPart) DbusEvent(com.linkedin.databus.core.DbusEvent) DbusEventAvroDecoder(com.linkedin.databus.client.DbusEventAvroDecoder) SchemaId(com.linkedin.databus2.schemas.SchemaId) Utf8(org.apache.avro.util.Utf8) VersionedSchemaSet(com.linkedin.databus2.schemas.VersionedSchemaSet) GenericRecord(org.apache.avro.generic.GenericRecord) Test(org.testng.annotations.Test)
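
The assertions read three fields ("etag", "flags", "expires") from the decoded record; note that Avro hands back string values as org.apache.avro.util.Utf8, which is why the test casts and calls toString(). A metadata schema shape consistent with those reads might look roughly like the following sketch (the real CORRECT_METADATA_SCHEMA constant lives in TestInternalMetadata and may differ in naming, defaults, or additional fields):

    // Sketch only: an Avro record with a required string and two optional fields,
    // matching a test event whose metadata sets etag and expires but leaves flags null.
    static final String METADATA_SCHEMA_SKETCH =
        "{ \"type\": \"record\", \"name\": \"metadata\", \"fields\": ["
      + "  { \"name\": \"etag\",    \"type\": \"string\" },"
      + "  { \"name\": \"flags\",   \"type\": [\"null\", \"int\"],  \"default\": null },"
      + "  { \"name\": \"expires\", \"type\": [\"null\", \"long\"], \"default\": null }"
      + "] }";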

Example 20 with DbusEvent

Use of com.linkedin.databus.core.DbusEvent in project databus by linkedin.

From class TestMultiConsumerCallback, method test3GroupedStreamConsumersHappyPath.

@Test(groups = { "small", "functional" })
public void test3GroupedStreamConsumersHappyPath() {
    LOG.info("\n\nstarting test3GroupedStreamConsumersHappyPath()");
    Hashtable<Long, AtomicInteger> keyCounts = new Hashtable<Long, AtomicInteger>();
    DbusEventBuffer eventsBuf = new DbusEventBuffer(_generic100KBufferStaticConfig);
    eventsBuf.start(0);
    eventsBuf.startEvents();
    initBufferWithEvents(eventsBuf, 1, 1, (short) 1, keyCounts);
    initBufferWithEvents(eventsBuf, 2, 2, (short) 3, keyCounts);
    eventsBuf.endEvents(100L);
    DatabusStreamConsumer mockConsumer1 = EasyMock.createStrictMock("consumer1", DatabusStreamConsumer.class);
    EasyMock.makeThreadSafe(mockConsumer1, true);
    DatabusStreamConsumer mockConsumer2 = EasyMock.createStrictMock("consumer2", DatabusStreamConsumer.class);
    EasyMock.makeThreadSafe(mockConsumer2, true);
    DatabusStreamConsumer mockConsumer3 = EasyMock.createStrictMock("consumer3", DatabusStreamConsumer.class);
    EasyMock.makeThreadSafe(mockConsumer3, true);
    DatabusCombinedConsumer sdccMockConsumer1 = new SelectingDatabusCombinedConsumer(mockConsumer1);
    DatabusCombinedConsumer sdccMockConsumer2 = new SelectingDatabusCombinedConsumer(mockConsumer2);
    DatabusCombinedConsumer sdccMockConsumer3 = new SelectingDatabusCombinedConsumer(mockConsumer3);
    List<String> sources = new ArrayList<String>();
    Map<Long, IdNamePair> sourcesMap = new HashMap<Long, IdNamePair>();
    for (int i = 1; i <= 3; ++i) {
        IdNamePair sourcePair = new IdNamePair((long) i, "source" + i);
        sources.add(sourcePair.getName());
        sourcesMap.put(sourcePair.getId(), sourcePair);
    }
    DatabusV2ConsumerRegistration consumerReg1 = new DatabusV2ConsumerRegistration(Arrays.asList(sdccMockConsumer1, sdccMockConsumer2, sdccMockConsumer3), sources, null);
    List<DatabusV2ConsumerRegistration> allRegistrations = Arrays.asList(consumerReg1);
    MultiConsumerCallback callback = new MultiConsumerCallback(allRegistrations, Executors.newCachedThreadPool(), 1000, new StreamConsumerCallbackFactory(null, null), null, null, null, null);
    callback.setSourceMap(sourcesMap);
    DbusEventBuffer.DbusEventIterator iter = eventsBuf.acquireIterator("myIter1");
    assert iter.hasNext() : "unable to read event";
    DbusEvent event1 = iter.next();
    assert iter.hasNext() : "unable to read event";
    DbusEvent event2 = iter.next();
    assert iter.hasNext() : "unable to read event";
    DbusEvent event3 = iter.next();
    initMockStreamConsumer3OptEventFullLifecycle(mockConsumer1, event1, event2, event3, keyCounts);
    initMockStreamConsumer3OptEventFullLifecycle(mockConsumer2, event1, event2, event3, keyCounts);
    initMockStreamConsumer3OptEventFullLifecycle(mockConsumer3, event1, event2, event3, keyCounts);
    assert3EventFullLifecycle(callback, event1, event2, event3);
    EasyMock.verify(mockConsumer1);
    EasyMock.verify(mockConsumer2);
    EasyMock.verify(mockConsumer3);
    assert (keyCounts.get(1L).get() + keyCounts.get(2L).get() + keyCounts.get(3L).get()) == 3 : "invalid number of calls: " + keyCounts.get(1L).get() + "," + keyCounts.get(2L).get() + "," + keyCounts.get(3L).get();
}
Also used : DatabusStreamConsumer(com.linkedin.databus.client.pub.DatabusStreamConsumer) DbusEvent(com.linkedin.databus.core.DbusEvent) HashMap(java.util.HashMap) Hashtable(java.util.Hashtable) ArrayList(java.util.ArrayList) DbusEventBuffer(com.linkedin.databus.core.DbusEventBuffer) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) IdNamePair(com.linkedin.databus.core.util.IdNamePair) DatabusCombinedConsumer(com.linkedin.databus.client.pub.DatabusCombinedConsumer) Test(org.testng.annotations.Test) BeforeTest(org.testng.annotations.BeforeTest) AfterTest(org.testng.annotations.AfterTest)
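
The final assertion sums the per-key counters and expects exactly 3 callbacks for 3 events across 3 consumers. That is the point of registering all three consumers in a single DatabusV2ConsumerRegistration: the group load-balances, so each event is delivered to exactly one member. By contrast (my reading of the grouped-versus-independent semantics, sketched with the same list-based constructor used above), independent registrations would each receive every event, giving nine onDataEvent() calls for the same three events:

    List<DatabusV2ConsumerRegistration> independentRegistrations = Arrays.asList(
        new DatabusV2ConsumerRegistration(Arrays.asList(sdccMockConsumer1), sources, null),
        new DatabusV2ConsumerRegistration(Arrays.asList(sdccMockConsumer2), sources, null),
        new DatabusV2ConsumerRegistration(Arrays.asList(sdccMockConsumer3), sources, null));
    // Passing independentRegistrations to MultiConsumerCallback would fan every event out to
    // all three consumers instead of load-balancing within one group.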

Aggregations

DbusEvent (com.linkedin.databus.core.DbusEvent): 30
Test (org.testng.annotations.Test): 21
ArrayList (java.util.ArrayList): 20
IdNamePair (com.linkedin.databus.core.util.IdNamePair): 18
HashMap (java.util.HashMap): 18
Checkpoint (com.linkedin.databus.core.Checkpoint): 12
DatabusStreamConsumer (com.linkedin.databus.client.pub.DatabusStreamConsumer): 11
DbusEventBuffer (com.linkedin.databus.core.DbusEventBuffer): 11
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 11
Hashtable (java.util.Hashtable): 10
UnifiedClientStats (com.linkedin.databus.client.pub.mbean.UnifiedClientStats): 9
AfterTest (org.testng.annotations.AfterTest): 9
BeforeTest (org.testng.annotations.BeforeTest): 9
DatabusV2ConsumerRegistration (com.linkedin.databus.client.consumer.DatabusV2ConsumerRegistration): 8
MultiConsumerCallback (com.linkedin.databus.client.consumer.MultiConsumerCallback): 8
StreamConsumerCallbackFactory (com.linkedin.databus.client.consumer.StreamConsumerCallbackFactory): 8
DatabusSubscription (com.linkedin.databus.core.data_model.DatabusSubscription): 8
DbusEventAppender (com.linkedin.databus.core.test.DbusEventAppender): 8
DbusEventGenerator (com.linkedin.databus.core.test.DbusEventGenerator): 8
UncaughtExceptionTrackingThread (com.linkedin.databus.core.util.UncaughtExceptionTrackingThread): 8