Use of com.linkedin.databus.monitoring.mbean.GGParserStatistics in project databus by LinkedIn.
The class RelayStatsRequestProcessor, method processInboundGGStats.
private void processInboundGGStats(DatabusRequest request, String category)
    throws IOException, RequestProcessingException {
  if (!(_relay instanceof DatabusRelayMain)) {
    throw new IllegalArgumentException(category + " for relay which is not DatabusRelayMain");
  }

  String psourceName = null;
  if (category.startsWith(INBOUND_GG_PSOURCE_PREFIX)) {
    psourceName = category.substring(INBOUND_GG_PSOURCE_PREFIX.length());
    if (psourceName == null || psourceName.length() <= 0) {
      throw new InvalidRequestParamValueException(request.getName(), INBOUND_GG_PSOURCE_PREFIX, null);
    }
    LOG.info("get parser stats for source " + psourceName);
  }

  List<String> phSourceNames = new ArrayList<String>();
  EventProducer[] prods = ((DatabusRelayMain) _relay).getProducers();
  GGParserStatistics stat = null;
  for (EventProducer prod : prods) {
    if (prod != null && (prod instanceof GoldenGateEventProducer)) {
      GoldenGateEventProducer ggProducer = (GoldenGateEventProducer) prod;
      String pSrcName = ggProducer.getParserStats().getPhysicalSourceName();
      phSourceNames.add(pSrcName);
      if (psourceName != null && psourceName.equals(pSrcName)) {
        // remember the stats object
        stat = ggProducer.getParserStats();
      }
    }
  }

  if (psourceName != null) {
    if (stat == null) {
      throw new InvalidRequestParamValueException(request.getName(), INBOUND_GG_PSOURCE_PREFIX, psourceName);
    }
    writeJsonObjectToResponse(stat, request);
  } else {
    writeJsonObjectToResponse(phSourceNames, request);
  }
}
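The lookup above (find the GoldenGateEventProducer whose physical source matches the requested name, otherwise report the list of known source names) can be factored into a small helper. The sketch below is illustrative only: findParserStats is a hypothetical name, not part of databus, and it assumes nothing beyond the APIs visible in the method above (getProducers(), getParserStats(), getPhysicalSourceName()).

  // Hypothetical helper (not part of databus) mirroring the lookup loop above:
  // return the GGParserStatistics for the given physical source, or null if no
  // GoldenGate producer with that name is registered on the relay.
  private static GGParserStatistics findParserStats(DatabusRelayMain relay, String psourceName) {
    for (EventProducer prod : relay.getProducers()) {
      if (prod instanceof GoldenGateEventProducer) {   // also covers the null check
        GGParserStatistics stats = ((GoldenGateEventProducer) prod).getParserStats();
        if (psourceName.equals(stats.getPhysicalSourceName())) {
          return stats;
        }
      }
    }
    return null;
  }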
Use of com.linkedin.databus.monitoring.mbean.GGParserStatistics in project databus by LinkedIn.
The class TestGoldenGateEventProducer, method testSCNRegressionStats.
@Test
public void testSCNRegressionStats() throws Exception {
  short[] sourceIds = new short[] { 505, 506 };
  String[] sourceNames = new String[] { "source1", "source2" };
  PhysicalSourceStaticConfig pssc = buildSimplePssc(sourceIds, sourceNames, "gg:///tmp:xxx");
  DbusEventBufferAppendable mb = createBufMult(pssc);

  // start the producer
  GoldenGateEventProducer gg = new GoldenGateEventProducer(pssc, null, mb, null, null);
  Object handleXmlCallbackObject = getHandleXmlCallbackInnerInstance(gg);
  Method method = getOnTransactionEndMethod();

  // generate the updates
  List<String> keys = new ArrayList<String>();
  keys.add("key1");
  GGParserStatistics ggParserStats = gg.getParserStats();

  // SCN = 10
  long timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC;
  List<TransactionState.PerSourceTransactionalUpdate> dbUpdates1 = generateUpdates(sourceIds, keys, 10);
  method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates1, new TransactionInfo(0, 0, timestamp, 10) });
  timestamp = System.currentTimeMillis() * DbusConstants.NUM_NSECS_IN_MSEC + 1;
  Assert.assertEquals("NumSCNRegressions", 0, ggParserStats.getNumSCNRegressions());
  Assert.assertEquals("LastRegressedScn", -1, ggParserStats.getLastRegressedScn());
  Assert.assertEquals("MaxScn", 10, ggParserStats.getMaxScn());

  // SCN = 5: SCN regression here
  List<TransactionState.PerSourceTransactionalUpdate> dbUpdates2 = generateUpdates(sourceIds, keys, 5);
  method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp + 1, 5) });
  Assert.assertEquals("NumSCNRegressions", 1, ggParserStats.getNumSCNRegressions());
  Assert.assertEquals("LastRegressedScn", 5, ggParserStats.getLastRegressedScn());
  Assert.assertEquals("MaxScn", 10, ggParserStats.getMaxScn());

  // SCN = 6: no regression here
  dbUpdates2 = generateUpdates(sourceIds, keys, 6);
  method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp + 1, 6) });
  Assert.assertEquals("NumSCNRegressions", 1, ggParserStats.getNumSCNRegressions());
  Assert.assertEquals("LastRegressedScn", 5, ggParserStats.getLastRegressedScn());
  Assert.assertEquals("MaxScn", 10, ggParserStats.getMaxScn());

  // SCN = 3: SCN regression again
  dbUpdates2 = generateUpdates(sourceIds, keys, 3);
  method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp + 1, 3) });
  Assert.assertEquals("NumSCNRegressions", 2, ggParserStats.getNumSCNRegressions());
  Assert.assertEquals("LastRegressedScn", 3, ggParserStats.getLastRegressedScn());
  Assert.assertEquals("MaxScn", 10, ggParserStats.getMaxScn());

  // SCN = 11: no regression here
  dbUpdates2 = generateUpdates(sourceIds, keys, 11);
  method.invoke(handleXmlCallbackObject, new Object[] { dbUpdates2, new TransactionInfo(0, 0, timestamp + 1, 11) });
  Assert.assertEquals("NumSCNRegressions", 2, ggParserStats.getNumSCNRegressions());
  Assert.assertEquals("LastRegressedScn", 3, ggParserStats.getLastRegressedScn());
  Assert.assertEquals("MaxScn", 11, ggParserStats.getMaxScn());
}
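The assertions above pin down the regression semantics: a regression is counted when a transaction's SCN is lower than the previous transaction's SCN (10 to 5, then 6 to 3), not merely lower than the maximum seen so far (5 to 6 does not count), and MaxScn is a plain high-water mark. A minimal sketch of that bookkeeping, assuming nothing about the actual GGParserStatistics internals, could look like this:

  // Sketch of the regression bookkeeping the test above implies; not the actual
  // GGParserStatistics code. The class name and fields are illustrative only.
  class ScnRegressionTracker {
    private long maxScn = -1;
    private long lastSeenScn = -1;
    private long lastRegressedScn = -1;
    private int numScnRegressions = 0;

    void onTransactionScn(long scn) {
      // A regression is an SCN lower than the previous transaction's SCN
      // (10 -> 5 and 6 -> 3 in the test), not merely lower than maxScn
      // (5 -> 6 is not counted even though 6 < 10).
      if (lastSeenScn >= 0 && scn < lastSeenScn) {
        numScnRegressions++;
        lastRegressedScn = scn;
      }
      lastSeenScn = scn;
      maxScn = Math.max(maxScn, scn);   // high-water mark never moves backwards
    }
  }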
Use of com.linkedin.databus.monitoring.mbean.GGParserStatistics in project databus by LinkedIn.
The class TestGoldenGateEventProducer, method testGGParserStats.
/**
 * Tests collection of parser stats, especially the lag between parsed and added trail files.
 * @throws Exception
 */
@Test
public void testGGParserStats() throws Exception {
  short[] sourceIds = new short[] { 505, 506 };
  String[] sourceNames = new String[] { "source1", "source2" };

  // set up the trail-files directory
  File ggTrailDir = FileUtils.createTempDir("testGGParserStats");

  // configure the physical source
  String uri = "gg://" + ggTrailDir.getAbsolutePath() + ":x3";
  PhysicalSourceStaticConfig pssc = buildSimplePssc(sourceIds, sourceNames, uri);
  LOG.info("Uri=" + uri);

  // create the schema
  Schema s = Schema.parse(sourceAvroSchema);
  VersionedSchema vs = new VersionedSchema(new VersionedSchemaId("source1", (short) 3), s, null);

  // mock for the schema registry
  SchemaRegistryService srs = EasyMock.createMock(SchemaRegistryService.class);
  EasyMock.expect(srs.fetchLatestVersionedSchemaBySourceName("source1")).andReturn(vs).anyTimes();
  EasyMock.expect(srs.fetchLatestVersionedSchemaBySourceName("source2")).andReturn(vs).anyTimes();
  EasyMock.expect(srs.fetchLatestVersionedSchemaBySourceName(null)).andReturn(vs);

  // mock for MaxSCNReaderWriter
  MaxSCNReaderWriter mscn = EasyMock.createMock(MaxSCNReaderWriter.class);
  EasyMock.expect(mscn.getMaxScn()).andReturn((long) -2).atLeastOnce();
  mscn.saveMaxScn(EasyMock.anyLong());
  EasyMock.expectLastCall().anyTimes();
  EasyMock.replay(mscn);
  EasyMock.replay(srs);

  int totalTransWritten = 0;
  int totalFilesWritten = 0;

  // buffer
  DbusEventBufferAppendable mb = createBufMult(pssc);

  // start the GG producer
  GoldenGateEventProducer gg = new GoldenGateEventProducer(pssc, srs, mb, null, mscn);

  // create the first two trail files
  addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x301"), 100, 4);
  addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x302"), 200, 4);
  totalTransWritten = 8;
  totalFilesWritten = 2;

  // get hold of the parser stats object
  final GGParserStatistics ggParserStats = gg.getParserStats();

  // everything should still be 0
  Assert.assertEquals(0, ggParserStats.getNumFilesParsed());
  Assert.assertEquals(0, ggParserStats.getNumFilesAdded());
  Assert.assertEquals(0, ggParserStats.getFilesLag());
  Assert.assertEquals(0, ggParserStats.getTimeLag());
  Assert.assertEquals(0, ggParserStats.getBytesLag());

  try {
    LOG.info("starting event producer");
    // the -2 here does nothing; the actual SCN comes through the MaxSCNReaderWriter mock
    gg.start(-2);

    // let it parse the first files
    TestUtil.assertWithBackoff(new ConditionCheck() {
      @Override
      public boolean check() {
        return ggParserStats.getNumFilesParsed() == 2
            && (8 * _transactionPatternSize == ggParserStats.getNumBytesTotalParsed());
      }
    }, "First two files parsed", 2000, LOG);

    // stats in the interim
    Assert.assertEquals(2, ggParserStats.getNumFilesParsed());
    Assert.assertEquals(2, ggParserStats.getNumFilesAdded());
    Assert.assertEquals(0, ggParserStats.getFilesLag());
    Assert.assertEquals(0, ggParserStats.getTimeLag());
    Assert.assertEquals(0, ggParserStats.getBytesLag());
    Assert.assertEquals(totalTransWritten * _transactionPatternSize, ggParserStats.getNumBytesTotalParsed());

    gg.pause();
    // this file will get parsed but not processed
    addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x303"), 300, 4);
    totalTransWritten += 4;
    totalFilesWritten++;
    // sleep to get more than a 1 ms lag time
    TestUtil.sleep(2000);
    addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x304"), 400, 4);
    totalTransWritten += 4;
    totalFilesWritten++;
    // sleep to guarantee we picked up a stats update (stats are updated every 5 seconds)
    TestUtil.sleep(6000);

    // Now we should be 2 files behind. The parser thread gets paused AFTER it starts
    // processing a file, so the actual value will be 1 file behind:
    // x303 has already started being parsed, only x304 is behind.
    int lagFiles = 1;
    // 1 file, 4 transactions each
    long lagBytes = 1 * 4 * _transactionPatternSize;
    /*
     * Assert.assertEquals(totalFilesWritten - 1, ggParserStats.getNumFilesParsed());
     * Assert.assertEquals(totalFilesWritten, ggParserStats.getNumFilesAdded());
     * Assert.assertEquals(lagFiles, ggParserStats.getFilesLag()); // because x303 got parsed
     *
     * // we added 4 files and parsed 3, so the diff should be 1 file size (4 transactions in 1 file)
     * Assert.assertEquals(lagBytes, ggParserStats.getBytesLag());
     * Assert.assertTrue(ggParserStats.getTimeLag() > 0);
     */

    gg.unpause();
    TestUtil.sleep(5000);
    // now we should have caught up
    Assert.assertEquals(4, ggParserStats.getNumFilesParsed());
    Assert.assertEquals(4, ggParserStats.getNumFilesAdded());
    Assert.assertEquals(0, ggParserStats.getFilesLag());
    Assert.assertEquals(0, ggParserStats.getTimeLag());
    Assert.assertEquals(0, ggParserStats.getBytesLag());

    // append to a file
    LOG.info("pausing again");
    gg.pause();
    addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x304"), 410, 4);
    totalTransWritten += 4;
    TestUtil.sleep(1000);
    addToTrailFile(new File(ggTrailDir.getAbsolutePath() + "/x304"), 420, 4);
    totalTransWritten += 4;
    TestUtil.sleep(2000);
    gg.unpause();
    TestUtil.sleep(5500);

    // should still be caught up
    Assert.assertEquals(4, ggParserStats.getNumFilesParsed());
    Assert.assertEquals(4, ggParserStats.getNumFilesAdded());
    Assert.assertEquals(0, ggParserStats.getFilesLag());
    Assert.assertEquals(0, ggParserStats.getTimeLag());
    Assert.assertEquals(0, ggParserStats.getBytesLag());

    // assert the aggregate stats
    int totalFilesSize = totalTransWritten * _transactionPatternSize;
    Assert.assertEquals((totalFilesSize / totalFilesWritten), ggParserStats.getAvgFileSize());
    Assert.assertEquals(true, ggParserStats.getAvgParseTransactionTimeNs() > 0);
    Assert.assertEquals("part1", ggParserStats.getPhysicalSourceName());
    Assert.assertEquals(totalFilesSize / totalTransWritten, ggParserStats.getAvgTransactionSize());
    Assert.assertEquals(423, ggParserStats.getMaxScn());
    // 2 events per transaction
    Assert.assertEquals(totalTransWritten * 2, ggParserStats.getNumTotalEvents());
    Assert.assertEquals(totalTransWritten, ggParserStats.getNumTransactionsTotal());
    Assert.assertEquals(totalTransWritten, ggParserStats.getNumTransactionsWithEvents());
    Assert.assertEquals(0, ggParserStats.getNumTransactionsWithoutEvents());
    Assert.assertEquals(true, ggParserStats.getTimeSinceLastAccessMs() > 0);
    Assert.assertEquals(totalTransWritten * _transactionPatternSize, ggParserStats.getNumBytesTotalParsed());
    Assert.assertEquals("NumSCNRegressions", 0, ggParserStats.getNumSCNRegressions());
    Assert.assertEquals("LastSCNRegressed", -1, ggParserStats.getLastRegressedScn());
  } finally {
    gg.shutdown();
  }
}
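The lag and average figures this test checks are consistent with simple running totals kept by the parser: files lag is added-minus-parsed files, bytes lag is added-minus-parsed bytes, time lag is the gap between the newest added and newest parsed positions, and the averages are totals divided by counts. The sketch below states those relationships only; the class, field names, and how the totals are fed are assumptions, not the actual GGParserStatistics implementation.

  // Sketch (assumed, not databus code) of the lag/average getters the test asserts on,
  // expressed over hypothetical running totals.
  class ParserLagSketch {
    private int numFilesAdded;         // trail files discovered
    private int numFilesParsed;        // trail files fully parsed
    private long bytesAdded;           // bytes discovered in trail files
    private long bytesParsed;          // bytes consumed by the parser
    private long lastAddedTsMs;        // timestamp of the newest added trail data
    private long lastParsedTsMs;       // timestamp of the newest parsed position
    private long numTransactions;      // parsed transactions
    private long numBytesTotalParsed;  // total parsed bytes

    int getFilesLag()  { return numFilesAdded - numFilesParsed; }
    long getBytesLag() { return bytesAdded - bytesParsed; }
    long getTimeLag()  { return Math.max(0, lastAddedTsMs - lastParsedTsMs); }

    long getAvgFileSize() {
      // matches totalFilesSize / totalFilesWritten in the test
      return numFilesAdded == 0 ? 0 : bytesAdded / numFilesAdded;
    }

    long getAvgTransactionSize() {
      // matches totalFilesSize / totalTransWritten in the test
      return numTransactions == 0 ? 0 : numBytesTotalParsed / numTransactions;
    }
  }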