Search in sources :

Example 16 with PhysicalSourceStaticConfig

use of com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig in project databus by linkedin.

the class TestDatabusRelayMain method testPendingEventSize.

/**
 * When the relay has no events, we should not get the x-dbus-pending-event-size header even if we present a small buffer.
 * When the relay has events, we should see the header on a small buffer but see an event when the buffer
 * is large enough, and we should not see the header in the large-buffer case.
 */
@Test
public void testPendingEventSize() throws Exception {
    DatabusRelayMain relay = null;
    try {
        final short srcId = 104;
        final String srcName = "foo";
        PhysicalSourceConfig pConfig = new PhysicalSourceConfig();
        pConfig.setId(srcId);
        pConfig.setName(srcName);
        pConfig.setUri("mock");
        short lid = (short) (srcId + 1);
        LogicalSourceConfig lConf = new LogicalSourceConfig();
        lConf.setId(lid);
        lConf.setName(srcName);
        // this is table name in the oracle source world
        lConf.setUri(srcName);
        lConf.setPartitionFunction("constant:1");
        pConfig.addSource(lConf);
        int relayPort = Utils.getAvailablePort(11994);
        final int relayId = 666;
        HttpRelay.Config httpRelayConfig = new HttpRelay.Config();
        ServerContainer.Config containerConfig = DatabusRelayTestUtil.createContainerConfig(relayId, relayPort);
        DbusEventBuffer.Config bufferConfig = DatabusRelayTestUtil.createBufferConfig(10000, 250, 100);
        httpRelayConfig.setContainer(containerConfig);
        httpRelayConfig.setEventBuffer(bufferConfig);
        httpRelayConfig.setStartDbPuller("true");
        PhysicalSourceStaticConfig[] pStaticConfigs = new PhysicalSourceStaticConfig[1];
        for (LogicalSourceConfig lsc : pConfig.getSources()) {
            httpRelayConfig.setSourceName("" + lsc.getId(), lsc.getName());
        }
        pStaticConfigs[0] = pConfig.build();
        relay = new DatabusRelayMain(httpRelayConfig.build(), pStaticConfigs);
        relay.start();
        // Insert one event into the relay.
        LogicalSource lsrc = new LogicalSource((int) lid, srcName);
        DbusEventBuffer buf = relay.getEventBuffer().getDbusEventBuffer(lsrc);
        byte[] schema = "abcdefghijklmnop".getBytes(Charset.defaultCharset());
        final long prevScn = 99;
        final long eventScn = 101;
        buf.start(prevScn);
        buf.startEvents();
        Assert.assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 100, (short) 0, System.currentTimeMillis() * 1000000, lid, schema, new byte[100], false, null));
        buf.endEvents(eventScn, null);
        HttpResponseHandler handler = new HttpResponseHandler();
        // On a good buffer length we should not see the extra header.
        testClient(relayPort, 1000, 100L, handler);
        Assert.assertNull(handler._pendingEventHeader, "Received pending event header on full buffer");
        // We should see the extra header when we get 0 events and the next event is too big to fit in the buffer
        testClient(relayPort, 10, 100L, handler);
        Assert.assertNotNull(handler._pendingEventHeader);
        Assert.assertEquals(Integer.valueOf(handler._pendingEventHeader).intValue(), 161);
        // But if there are no events, then we should not see the header even if the buffer is very small
        handler._pendingEventHeader = null;
        testClient(relayPort, 10, 1005L, handler);
        Assert.assertNull(handler._pendingEventHeader, "Received pending event header when there were no events to stream");
    } finally {
        // guard against failures that occur before the relay was constructed
        if (relay != null) {
            relay.shutdownUninteruptibly();
        }
    }
}
Also used : LogicalSourceConfig(com.linkedin.databus2.relay.config.LogicalSourceConfig) PhysicalSourceStaticConfig(com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig) HttpRelay(com.linkedin.databus.container.netty.HttpRelay) LogicalSourceConfig(com.linkedin.databus2.relay.config.LogicalSourceConfig) PhysicalSourceConfig(com.linkedin.databus2.relay.config.PhysicalSourceConfig) PhysicalSourceStaticConfig(com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) Checkpoint(com.linkedin.databus.core.Checkpoint) DbusEventBuffer(com.linkedin.databus.core.DbusEventBuffer) PhysicalSourceConfig(com.linkedin.databus2.relay.config.PhysicalSourceConfig) DbusEventKey(com.linkedin.databus.core.DbusEventKey) ServerContainer(com.linkedin.databus2.core.container.netty.ServerContainer) Test(org.testng.annotations.Test)
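For reference, the testClient/HttpResponseHandler pair used above is defined elsewhere in TestDatabusRelayMain; all the test relies on is that the client streams from the relay with a given read-buffer size and records the x-dbus-pending-event-size response header. A minimal, hypothetical illustration of that contract (NOT the actual helpers; the URL path and query string are placeholders):

// Hypothetical probe for the pending-event-size contract described in the javadoc above.
// The relay sets x-dbus-pending-event-size only when it has events but the next one does
// not fit into the client's buffer; query-string details are deliberately left out.
static String readPendingEventSizeHeader(int relayPort, String queryString) throws Exception {
    java.net.URL url = new java.net.URL("http://localhost:" + relayPort + "/stream?" + queryString);
    java.net.HttpURLConnection conn = (java.net.HttpURLConnection) url.openConnection();
    try {
        conn.connect();
        // null when the header is absent (events fit into the buffer, or there are no events at all)
        return conn.getHeaderField("x-dbus-pending-event-size");
    } finally {
        conn.disconnect();
    }
}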

Example 17 with PhysicalSourceStaticConfig

use of com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig in project databus by linkedin.

the class TestGoldenGateEventProducer method testTransactionsWithDuplicateSCN.

/**
 * Test to cover merging of transactions with the same SCN.
 */
@Test
public void testTransactionsWithDuplicateSCN() throws Exception {
    short[] sourceIds = new short[] { 505, 506 };
    String[] sourceNames = new String[] { "source1", "source2" };
    PhysicalSourceStaticConfig pssc = buildSimplePssc(sourceIds, sourceNames, "gg:///tmp:xxx");
    DbusEventBufferAppendable mb = createBufMult(pssc);
    // start producer
    GoldenGateEventProducer gg = new GoldenGateEventProducer(pssc, null, mb, null, null);
    Object handleXmlCallbackObject = getHandleXmlCallbackInnerInstance(gg);
    Method method = getOnTransactionEndMethod();
    // generates the updates
    List<String> keys = new ArrayList<String>();
    keys.add("key1");
    // SCN = 10 - Case where both transactions have the same SCN: same set of sources and
    // the same keys
    long scn = 10;
    long timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC;
    List<TransactionState.PerSourceTransactionalUpdate> dbUpdates1 = generateUpdates(sourceIds, keys, scn);
    method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, scn) });
    timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC + 1;
    // Same SCN as before
    List<TransactionState.PerSourceTransactionalUpdate> dbUpdates2 = generateUpdates(sourceIds, keys, scn);
    method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp + 1, scn) });
    // Expect no events as events are not yet appended to buffer.
    testStats(gg, new EventStatsValues(sourceIds[0], 0, 0, 0, 0, 0), new EventStatsValues(sourceIds[1], 0, 0, 0, 0, 0), new EventStatsValues(GoldenGateEventProducer.GLOBAL_SOURCE_ID, 0, 0, 0, 0, 0));
    // SCN = 11 - Case where both transactions have the same SCN: same set of sources but
    // different keys
    {
        scn = 11;
        timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC + 1;
        dbUpdates1 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, scn) });
        timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC + 1;
        keys.clear();
        keys.add("key2");
        // Same SCN as before
        dbUpdates2 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp, scn) });
        // Testing for SCN = 10 case
        testStats(gg, new EventStatsValues(sourceIds[0], 5, 0, 1, 0, 10), new EventStatsValues(sourceIds[1], 5, 0, 1, 0, 10), new EventStatsValues(GoldenGateEventProducer.GLOBAL_SOURCE_ID, 5, 0, 2, 0, 10));
    }
    // SCN = 12 - Case where both transactions have the same SCN but different sets of
    // sources
    {
        scn = 12;
        keys.clear();
        keys.add("key2");
        timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC + 2;
        dbUpdates1 = generateUpdates(new short[] { sourceIds[1] }, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, scn) });
        // Same SCN as before
        dbUpdates2 = generateUpdates(new short[] { sourceIds[0] }, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp, scn) });
        // Testing for SCN = 11 case
        testStats(gg, new EventStatsValues(sourceIds[0], 5, 0, 3, 0, 11), new EventStatsValues(sourceIds[1], 5, 0, 3, 0, 11), new EventStatsValues(GoldenGateEventProducer.GLOBAL_SOURCE_ID, 5, 0, 6, 0, 11));
    }
    // SCN = 13 - Case where more than 2 transactions have the same SCN and keys. The keys
    // will be merged.
    {
        scn = 13;
        timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC + 3;
        dbUpdates1 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, scn) });
        // Same SCN as before
        dbUpdates2 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp, scn) });
        dbUpdates1 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, scn) });
        // Same SCN as before
        dbUpdates2 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp, scn) });
        // Testing for SCN = 12 case
        testStats(gg, new EventStatsValues(sourceIds[0], 5, 0, 4, 0, 12), new EventStatsValues(sourceIds[1], 5, 0, 4, 0, 12), new EventStatsValues(GoldenGateEventProducer.GLOBAL_SOURCE_ID, 5, 0, 8, 0, 12));
    }
    // SCN = 14 - Case where more than 2 transactions have the same SCN but different keys.
    {
        scn = 14;
        timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC + 3;
        dbUpdates1 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, scn) });
        // Same SCN as before
        dbUpdates2 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp, scn) });
        keys.clear();
        keys.add("key5");
        dbUpdates1 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, scn) });
        // Same SCN as before
        dbUpdates2 = generateUpdates(sourceIds, keys, scn);
        method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp, scn) });
        // Testing for SCN = 13 case
        testStats(gg, new EventStatsValues(sourceIds[0], 5, 0, 5, 0, 13), new EventStatsValues(sourceIds[1], 5, 0, 5, 0, 13), new EventStatsValues(GoldenGateEventProducer.GLOBAL_SOURCE_ID, 5, 0, 10, 0, 13));
    }
    // This is an extra call; the corresponding events will not be added to the EVB.
    // It is needed to flush the events from the calls above into the EVB.
    scn = 15;
    timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC + 4;
    method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, scn) });
    // Testing for SCN = 14 case
    testStats(gg, new EventStatsValues(sourceIds[0], 5, 0, 7, 0, 14), new EventStatsValues(sourceIds[1], 5, 0, 7, 0, 14), new EventStatsValues(GoldenGateEventProducer.GLOBAL_SOURCE_ID, 5, 0, 14, 0, 14));
}
Also used : PhysicalSourceStaticConfig(com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig) DbusEventBufferAppendable(com.linkedin.databus.core.DbusEventBufferAppendable) ArrayList(java.util.ArrayList) Method(java.lang.reflect.Method) TransactionInfo(com.linkedin.databus.monitoring.mbean.GGParserStatistics.TransactionInfo) Test(org.testng.annotations.Test)
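The buildSimplePssc and createBufMult helpers used above live elsewhere in TestGoldenGateEventProducer. For orientation, a buildSimplePssc-style helper can be sketched from the config-building pattern of Examples 16 and 18; the physical-source id, the name "part1" and the partition function are assumptions (the name matches the getPhysicalSourceName() assertion in Example 19):

// Plausible sketch of a buildSimplePssc-style helper (not the actual test code):
// wire the given logical sources into a mutable PhysicalSourceConfig and build the
// immutable PhysicalSourceStaticConfig, as in Examples 16 and 18.
private static PhysicalSourceStaticConfig buildSimplePsscSketch(short[] srcIds, String[] srcNames, String uri)
        throws InvalidConfigException {
    PhysicalSourceConfig pConfig = new PhysicalSourceConfig();
    pConfig.setId((short) 1);              // assumed physical-source id
    pConfig.setName("part1");              // assumed; Example 19 expects this physical source name
    pConfig.setUri(uri);                   // e.g. "gg:///tmp:xxx" or "gg://<trailDir>:x3"
    for (int i = 0; i < srcIds.length; i++) {
        LogicalSourceConfig lConf = new LogicalSourceConfig();
        lConf.setId(srcIds[i]);
        lConf.setName(srcNames[i]);
        lConf.setUri(srcNames[i]);         // table name in the source world
        lConf.setPartitionFunction("constant:1");
        pConfig.addSource(lConf);
    }
    return pConfig.build();
}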

Example 18 with PhysicalSourceStaticConfig

use of com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig in project databus by linkedin.

the class PhysicalSourceConfig method build.

@Override
public PhysicalSourceStaticConfig build() throws InvalidConfigException {
    checkForNulls();
    //check config options for chained relays
    if (_largestEventSizeInBytes >= _largestWindowSizeInBytes) {
        throw new InvalidConfigException("Invalid relay config: largestEventSizeInBytes has to be lesser than largestWindowSizeInBytes:" + " largestEventSizeInBytes=" + _largestEventSizeInBytes + " largestWindowSizeInBytes=" + _largestWindowSizeInBytes);
    }
    LogicalSourceStaticConfig[] sourcesStaticConfigs = new LogicalSourceStaticConfig[_sources.size()];
    for (int i = 0; i < _sources.size(); ++i) {
        sourcesStaticConfigs[i] = _sources.get(i).build();
    }
    ChunkingType chunkingType = ChunkingType.valueOf(_chunkingType);
    return new PhysicalSourceStaticConfig(_name, _id, _uri, _resourceKey, sourcesStaticConfigs, _role, _slowSourceQueryThreshold, _restartScnOffset, _retries.build(), chunkingType, _txnsPerChunk, _scnChunkSize, _chunkedScnThreshold, _maxScnDelayMs, _eventRatePerSec, _maxThrottleDurationInSecs, isDbusEventBufferSet() ? _dbusEventBuffer.build() : null, _largestEventSizeInBytes, _largestWindowSizeInBytes, _errorOnMissingFields, _xmlVersion, _xmlEncoding, _replBitSetter.build());
}
Also used : InvalidConfigException(com.linkedin.databus.core.util.InvalidConfigException) ChunkingType(com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig.ChunkingType)
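In usage, build() is reached through the pattern shown in Example 16: populate a mutable PhysicalSourceConfig, then build the immutable static config, handling InvalidConfigException. A minimal sketch with illustrative ids and names:

// Minimal usage sketch of PhysicalSourceConfig.build() (illustrative values only).
PhysicalSourceConfig pConfig = new PhysicalSourceConfig();
pConfig.setId((short) 104);
pConfig.setName("foo");
pConfig.setUri("mock");

LogicalSourceConfig lConf = new LogicalSourceConfig();
lConf.setId((short) 105);
lConf.setName("foo");
lConf.setUri("foo");                      // table name in the oracle source world
lConf.setPartitionFunction("constant:1");
pConfig.addSource(lConf);

try {
    PhysicalSourceStaticConfig pssc = pConfig.build();
} catch (InvalidConfigException e) {
    // thrown by checkForNulls() for missing required fields, or when
    // largestEventSizeInBytes >= largestWindowSizeInBytes
}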

Example 19 with PhysicalSourceStaticConfig

use of com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig in project databus by linkedin.

the class TestGoldenGateEventProducer method testGGParserStats.

/**
   * test collection of parser stats, especially lag between parsed and added files
   * @throws Exception
   */
@Test
public void testGGParserStats() throws Exception {
    short[] sourceIds = new short[] { 505, 506 };
    String[] sourceNames = new String[] { "source1", "source2" };
    // setup trail Files directory
    File ggTrailDir = FileUtils.createTempDir("testGGParserStats");
    // configure physical source
    String uri = "gg://" + ggTrailDir.getAbsolutePath() + ":x3";
    PhysicalSourceStaticConfig pssc = buildSimplePssc(sourceIds, sourceNames, uri);
    LOG.info("Uri=" + uri);
    // create schema
    Schema s = Schema.parse(sourceAvroSchema);
    VersionedSchema vs = new VersionedSchema(new VersionedSchemaId("source1", (short) 3), s, null);
    // mock for schema registry
    SchemaRegistryService srs = EasyMock.createMock(SchemaRegistryService.class);
    EasyMock.expect(srs.fetchLatestVersionedSchemaBySourceName("source1")).andReturn(vs).anyTimes();
    EasyMock.expect(srs.fetchLatestVersionedSchemaBySourceName("source2")).andReturn(vs).anyTimes();
    EasyMock.expect(srs.fetchLatestVersionedSchemaBySourceName(null)).andReturn(vs);
    // mock for MaxSCNReaderWriter
    MaxSCNReaderWriter mscn = EasyMock.createMock(MaxSCNReaderWriter.class);
    EasyMock.expect(mscn.getMaxScn()).andReturn((long) -2).atLeastOnce();
    mscn.saveMaxScn(EasyMock.anyLong());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(mscn);
    EasyMock.replay(srs);
    int totalTransWritten = 0;
    int totalFilesWritten = 0;
    // buffer
    DbusEventBufferAppendable mb = createBufMult(pssc);
    // start GG producer
    GoldenGateEventProducer gg = new GoldenGateEventProducer(pssc, srs, mb, null, mscn);
    // create first 2 files
    addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x301"), 100, 4);
    addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x302"), 200, 4);
    totalTransWritten = 8;
    totalFilesWritten = 2;
    // get hold of parser stats object
    final GGParserStatistics ggParserStats = gg.getParserStats();
    // all should be 0
    Assert.assertEquals(0, ggParserStats.getNumFilesParsed());
    Assert.assertEquals(0, ggParserStats.getNumFilesAdded());
    Assert.assertEquals(0, ggParserStats.getFilesLag());
    Assert.assertEquals(0, ggParserStats.getTimeLag());
    Assert.assertEquals(0, ggParserStats.getBytesLag());
    try {
        LOG.info("starting event producer");
        // -2 here does nothing; the actual setting happens through the mock of MaxSCNReaderWriter
        gg.start(-2);
        // let it parse first files
        TestUtil.assertWithBackoff(new ConditionCheck() {

            @Override
            public boolean check() {
                return ggParserStats.getNumFilesParsed() == 2 && (8 * _transactionPatternSize == ggParserStats.getNumBytesTotalParsed());
            }
        }, "First two files parsed", 2000, LOG);
        // stats in the interim
        Assert.assertEquals(2, ggParserStats.getNumFilesParsed());
        Assert.assertEquals(2, ggParserStats.getNumFilesAdded());
        Assert.assertEquals(0, ggParserStats.getFilesLag());
        Assert.assertEquals(0, ggParserStats.getTimeLag());
        Assert.assertEquals(0, ggParserStats.getBytesLag());
        Assert.assertEquals(totalTransWritten * _transactionPatternSize, ggParserStats.getNumBytesTotalParsed());
        gg.pause();
        // the file will get parsed but not processed
        addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x303"), 300, 4);
        totalTransWritten += 4;
        totalFilesWritten++;
        // to get more than a ms of lag time
        TestUtil.sleep(2000);
        addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x304"), 400, 4);
        totalTransWritten += 4;
        totalFilesWritten++;
        // to guarantee we pick up the stats update (stats are updated every 5 seconds)
        TestUtil.sleep(6000);
        // Now we should be 2 files behind. The parser thread gets paused AFTER it starts
        // processing a file, so the actual value will be 1 file behind:
        // 303 has already started being parsed; only 304 is behind.
        int lagFiles = 1;
        // 1 file, 4 transactions each
        long lagBytes = 1 * 4 * _transactionPatternSize;
        /*
       * Assert.assertEquals(totalFilesWritten-1, ggParserStats.getNumFilesParsed());
       * Assert.assertEquals(totalFilesWritten, ggParserStats.getNumFilesAdded());
       * Assert.assertEquals(lagFiles, ggParserStats.getFilesLag()); // because 303 got
       * parsed
       *
       * // we added 4 files and parsed 3 , so the diff should be 1 file size (4
       * trasactions in 1 file) Assert.assertEquals(lagBytes,
       * ggParserStats.getBytesLag()); Assert.assertTrue(ggParserStats.getTimeLag()>0);
       */
        gg.unpause();
        TestUtil.sleep(5000);
        // now we should have caught up
        Assert.assertEquals(4, ggParserStats.getNumFilesParsed());
        Assert.assertEquals(4, ggParserStats.getNumFilesAdded());
        Assert.assertEquals(0, ggParserStats.getFilesLag());
        Assert.assertEquals(0, ggParserStats.getTimeLag());
        Assert.assertEquals(0, ggParserStats.getBytesLag());
        // append to a file
        LOG.info("pausing again");
        gg.pause();
        addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x304"), 410, 4);
        totalTransWritten += 4;
        TestUtil.sleep(1000);
        addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x304"), 420, 4);
        totalTransWritten += 4;
        TestUtil.sleep(2000);
        gg.unpause();
        TestUtil.sleep(5500);
        // should still be up to date
        Assert.assertEquals(4, ggParserStats.getNumFilesParsed());
        Assert.assertEquals(4, ggParserStats.getNumFilesAdded());
        Assert.assertEquals(0, ggParserStats.getFilesLag());
        Assert.assertEquals(0, ggParserStats.getTimeLag());
        Assert.assertEquals(0, ggParserStats.getBytesLag());
        // assert the stats
        int totalFilesSize = totalTransWritten * _transactionPatternSize;
        Assert.assertEquals((totalFilesSize / totalFilesWritten), ggParserStats.getAvgFileSize());
        Assert.assertEquals(true, ggParserStats.getAvgParseTransactionTimeNs() > 0);
        Assert.assertEquals("part1", ggParserStats.getPhysicalSourceName());
        Assert.assertEquals(totalFilesSize / totalTransWritten, ggParserStats.getAvgTransactionSize());
        Assert.assertEquals(423, ggParserStats.getMaxScn());
        // 2 events per transaction
        Assert.assertEquals(totalTransWritten * 2, ggParserStats.getNumTotalEvents());
        Assert.assertEquals(totalTransWritten, ggParserStats.getNumTransactionsTotal());
        Assert.assertEquals(totalTransWritten, ggParserStats.getNumTransactionsWithEvents());
        Assert.assertEquals(0, ggParserStats.getNumTransactionsWithoutEvents());
        Assert.assertEquals(true, ggParserStats.getTimeSinceLastAccessMs() > 0);
        Assert.assertEquals(totalTransWritten * _transactionPatternSize, ggParserStats.getNumBytesTotalParsed());
        Assert.assertEquals("NumSCNRegressions", 0, ggParserStats.getNumSCNRegressions());
        Assert.assertEquals("LastSCNRegressed", -1, ggParserStats.getLastRegressedScn());
    } finally {
        gg.shutdown();
    }
    return;
}
Also used : ConditionCheck(com.linkedin.databus2.test.ConditionCheck) PhysicalSourceStaticConfig(com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig) MaxSCNReaderWriter(com.linkedin.databus2.core.seq.MaxSCNReaderWriter) GGParserStatistics(com.linkedin.databus.monitoring.mbean.GGParserStatistics) VersionedSchemaId(com.linkedin.databus2.schemas.VersionedSchemaId) Schema(org.apache.avro.Schema) VersionedSchema(com.linkedin.databus2.schemas.VersionedSchema) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) DbusEventBufferAppendable(com.linkedin.databus.core.DbusEventBufferAppendable) VersionedSchema(com.linkedin.databus2.schemas.VersionedSchema) File(java.io.File) Test(org.testng.annotations.Test)
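The expected maxScn of 423 is consistent with the trail-file setup if addToTrailFile(file, startScn, numTxns), a helper defined elsewhere in the test class, assigns consecutive SCNs startScn .. startScn + numTxns - 1 (an assumption about that helper):

// Assumed SCN layout of addToTrailFile(file, startScn, numTxns):
// the last append above starts at SCN 420 and writes 4 transactions, hence
long expectedMaxScn = 420 + 4 - 1;   // = 423, matching ggParserStats.getMaxScn()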

Example 20 with PhysicalSourceStaticConfig

use of com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig in project databus by linkedin.

the class TestGoldenGateEventProducer method testAddEventToBufferRateControl.

private void testAddEventToBufferRateControl(long throttleDurationInSecs) throws InvalidConfigException, UnsupportedKeyException, DatabusException, NoSuchFieldException, IllegalAccessException {
    // 1 event per second required. Send 5 events. Must have 4 sleeps.
    long rate = 1;
    int numEvents = 5;
    PhysicalSourceStaticConfig pssc = buildPssc(rate, throttleDurationInSecs);
    long scn = 10;
    DbusEventBufferAppendable mb = createBufMult(pssc);
    GoldenGateEventProducer gg = new GoldenGateEventProducer(pssc, null, mb, null, null);
    // enable if want to run with mocked timer
    // run_with_mock_timer(gg);
    int sourceId = 505;
    HashSet<DBUpdateImage> db = new HashSet<DBUpdateImage>();
    // name1 is the only key
    ColumnsState.KeyPair kp1 = new ColumnsState.KeyPair(new String("name1"), Schema.Type.RECORD);
    ArrayList<ColumnsState.KeyPair> keyPairs = new ArrayList<ColumnsState.KeyPair>(numEvents);
    keyPairs.add(kp1);
    Schema s = Schema.parse(avroSchema2);
    GenericRecord gr1 = new GenericData.Record(s);
    gr1.put("name1", "phani1");
    gr1.put("name2", "boris1");
    GenericRecord gr2 = new GenericData.Record(s);
    gr2.put("name1", "phani2");
    gr2.put("name2", "boris2");
    GenericRecord gr3 = new GenericData.Record(s);
    gr3.put("name1", "phani3");
    gr3.put("name2", "boris3");
    GenericRecord gr4 = new GenericData.Record(s);
    gr4.put("name1", "phani4");
    gr4.put("name2", "boris4");
    GenericRecord gr5 = new GenericData.Record(s);
    gr5.put("name1", "phani5");
    gr5.put("name2", "boris5");
    DBUpdateImage dbi1 = new DBUpdateImage(keyPairs, scn, gr1, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false);
    DBUpdateImage dbi2 = new DBUpdateImage(keyPairs, scn, gr2, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false);
    DBUpdateImage dbi3 = new DBUpdateImage(keyPairs, scn, gr3, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false);
    DBUpdateImage dbi4 = new DBUpdateImage(keyPairs, scn, gr4, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false);
    DBUpdateImage dbi5 = new DBUpdateImage(keyPairs, scn, gr5, s, DbUpdateState.DBUpdateImage.OpType.INSERT, false);
    db.add(dbi1);
    db.add(dbi2);
    db.add(dbi3);
    db.add(dbi4);
    db.add(dbi5);
    // For a given transaction and logical source, only 1 update survives (the last one succeeds)
    Assert.assertEquals(1, db.size());
    // Generate 5 transactions with the same update
    for (int i = 0; i < numEvents; i++) {
        List<TransactionState.PerSourceTransactionalUpdate> dbUpdates = new ArrayList<TransactionState.PerSourceTransactionalUpdate>(10);
        TransactionState.PerSourceTransactionalUpdate dbUpdate = new TransactionState.PerSourceTransactionalUpdate(sourceId, db);
        dbUpdates.add(dbUpdate);
        long timestamp = 60;
        gg.addEventToBuffer(dbUpdates, new TransactionInfo(0, 0, timestamp, scn));
        scn++;
    }
    // It may not sleep the very first time, as 1 second may have elapsed from when the rate control
    // started to when the event is getting inserted. Subsequently, expect rate control to kick in.
    long numSleeps = Math.min(numEvents, throttleDurationInSecs);
    Assert.assertEquals(gg.getRateControl().getNumSleeps(), numSleeps);
    gg.getRateControl().resetNumSleeps();
    return;
}
Also used : PhysicalSourceStaticConfig(com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig) TransactionState(com.linkedin.databus2.ggParser.XmlStateMachine.TransactionState) DbusEventBufferAppendable(com.linkedin.databus.core.DbusEventBufferAppendable) Schema(org.apache.avro.Schema) VersionedSchema(com.linkedin.databus2.schemas.VersionedSchema) DBUpdateImage(com.linkedin.databus2.ggParser.XmlStateMachine.DbUpdateState.DBUpdateImage) ColumnsState(com.linkedin.databus2.ggParser.XmlStateMachine.ColumnsState) ArrayList(java.util.ArrayList) TransactionInfo(com.linkedin.databus.monitoring.mbean.GGParserStatistics.TransactionInfo) GenericRecord(org.apache.avro.generic.GenericRecord) GenericRecord(org.apache.avro.generic.GenericRecord) HashSet(java.util.HashSet)
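The sleep-count expectation mirrors the assertion itself: at a rate of 1 event per second, the producer appears to sleep at most once per appended event and stops throttling once the configured throttle duration is exhausted, so the test asserts Math.min(numEvents, throttleDurationInSecs). A worked example with an illustrative throttle duration (the method is parameterized, so the actual value comes from the caller):

// Illustrative arithmetic for gg.getRateControl().getNumSleeps().
int numEvents = 5;
long throttleDurationInSecs = 3;                                    // example value only
long expectedSleeps = Math.min(numEvents, throttleDurationInSecs);  // = 3 for this example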

Aggregations

PhysicalSourceStaticConfig (com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig)21 ArrayList (java.util.ArrayList)10 Test (org.testng.annotations.Test)9 PhysicalSourceConfig (com.linkedin.databus2.relay.config.PhysicalSourceConfig)8 DbusEventBufferAppendable (com.linkedin.databus.core.DbusEventBufferAppendable)7 PhysicalPartition (com.linkedin.databus.core.data_model.PhysicalPartition)7 InvalidConfigException (com.linkedin.databus.core.util.InvalidConfigException)6 LogicalSourceConfig (com.linkedin.databus2.relay.config.LogicalSourceConfig)6 TransactionInfo (com.linkedin.databus.monitoring.mbean.GGParserStatistics.TransactionInfo)5 LogicalSourceStaticConfig (com.linkedin.databus2.relay.config.LogicalSourceStaticConfig)5 DbusEventBuffer (com.linkedin.databus.core.DbusEventBuffer)4 NoSuchSchemaException (com.linkedin.databus2.schemas.NoSuchSchemaException)4 ObjectMapper (org.codehaus.jackson.map.ObjectMapper)4 EventSourceStatistics (com.linkedin.databus.monitoring.mbean.EventSourceStatistics)3 DatabusException (com.linkedin.databus2.core.DatabusException)3 EventProducer (com.linkedin.databus2.producers.EventProducer)3 PartitionFunction (com.linkedin.databus2.producers.PartitionFunction)3 OracleEventProducer (com.linkedin.databus2.producers.db.OracleEventProducer)3 HashSet (java.util.HashSet)3 BeforeTest (org.testng.annotations.BeforeTest)3