Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class BootstrapProducerCallback, method onEndDataEventSequence.
@Override
public ConsumerCallbackResult onEndDataEventSequence(SCN endScn) {
  try {
    // Update the metadata for all sources
    updateAllProducerSourcesMetaData();
    _oldWindowScn = _newWindowScn;
    // Update all the sources' info in the database.
    // If we need to create a new log file for a source, create one.
    updateSourcesInDB();
    boolean markActive = false;
    if (_state == BootstrapProducerStatus.SEEDING_CATCHUP) {
      if (_newWindowScn > _seedCatchupScn) {
        LOG.info("Bootstrap DB for sources (" + _trackedSources.values() + ") has completed the seeding catchup phase. Marking them active in the bootstrap_sources table! SeedCatchupSCN was: " + _seedCatchupScn);
        markActive = true;
      }
    } else if (_state == BootstrapProducerStatus.FELL_OFF_RELAY) {
      if (_newWindowScn > _producerStartScn) {
        LOG.info("Bootstrap DB for sources (" + _trackedSources.values() + ") has started getting events since it last fell off the relay! Marking them active!");
        markActive = true;
      }
    }
    if (markActive) {
      _bootstrapDao.updateSourcesStatus(_trackedSources.keySet(), BootstrapProducerStatus.ACTIVE);
    }
    Connection conn = getConnection();
    try {
      DBHelper.commit(conn);
    } catch (SQLException s) {
      DBHelper.rollback(conn);
      throw s;
    }
    if (markActive) {
      _state = BootstrapProducerStatus.ACTIVE;
      for (SourceInfo info : _trackedSources.values()) {
        info.setStatus(BootstrapProducerStatus.ACTIVE);
      }
    }
    LOG.info("Bootstrap producer up to scn " + _newWindowScn);
  } catch (SQLException e) {
    if (null != _statsCollector) {
      _statsCollector.registerSQLException();
    }
    LOG.error("Got SQLException in the endDataEventSequence handler! Connections will be reset!", e);
    try {
      reset();
    } catch (DatabusException e2) {
      DbusPrettyLogUtils.logExceptionAtError("Unable to reset connection", e2, LOG);
    } catch (SQLException sqlEx) {
      LOG.error("Got exception while resetting connections. Stopping client!", sqlEx);
      return ConsumerCallbackResult.ERROR_FATAL;
    }
    return ConsumerCallbackResult.ERROR;
  } finally {
    _totalRm.stop();
    // Convert nanoseconds to milliseconds for the stats collector.
    long latency = _totalRm.getDuration() / 1000000L;
    if (null != _statsCollector) {
      _statsCollector.registerEndWindow(latency, _totalNumEvents, _newWindowScn);
    }
  }
  return ConsumerCallbackResult.SUCCESS;
}
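The commit step above pairs every commit attempt with a rollback on failure so a partially written window is undone before the error propagates. A minimal sketch of that idiom in plain JDBC, without DBHelper; TxnHelper and commitOrRollback are illustrative names, not part of databus:

import java.sql.Connection;
import java.sql.SQLException;

final class TxnHelper {
  // Commit the current transaction; on failure roll back so the window's
  // partial writes are undone, then rethrow so the caller can translate
  // the failure into ERROR or ERROR_FATAL.
  static void commitOrRollback(Connection conn) throws SQLException {
    try {
      conn.commit();
    } catch (SQLException e) {
      conn.rollback();
      throw e;
    }
  }
}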
Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class OracleJarUtils, method createOracleDataSource.
/**
 * Loads the class oracle.jdbc.pool.OracleDataSource via reflection, instantiates it,
 * and sets the given URI on the new instance.
 *
 * @param uri the JDBC URI to set on the data source
 * @return the configured DataSource
 * @throws Exception if the class cannot be loaded, instantiated, or configured
 */
public static DataSource createOracleDataSource(String uri) throws Exception {
  // Create the OracleDataSource used to get DB connection(s)
  DataSource ds = null;
  try {
    Class<?> oracleDataSourceClass = loadClass("oracle.jdbc.pool.OracleDataSource");
    Object ods = oracleDataSourceClass.newInstance();
    ds = (DataSource) ods;
    Method setURLMethod = oracleDataSourceClass.getMethod("setURL", String.class);
    Method getConnectionPropertiesMethod = oracleDataSourceClass.getMethod("getConnectionProperties");
    Method setConnectionPropertiesMethod = oracleDataSourceClass.getMethod("setConnectionProperties", Properties.class);
    setURLMethod.invoke(ods, uri);
    // DDS-425: set oracle.jdbc.V8Compatible so DATE columns are mapped to java.sql.Timestamp.
    // Oracle JDBC 11g fixed this, so the workaround can be dropped once the driver is upgraded to 11g.
    Properties prop = (Properties) getConnectionPropertiesMethod.invoke(ods);
    if (prop == null) {
      prop = new Properties();
    }
    //prop.put("oracle.jdbc.V8Compatible","true");
    setConnectionPropertiesMethod.invoke(ods, prop);
  } catch (Exception e) {
    String errMsg = "Error trying to create an Oracle DataSource";
    LOG.error(errMsg, e);
    throw e;
  }
  return ds;
}
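The reflective indirection means the Oracle driver jar only needs to be on the classpath at runtime, not at compile time. A hypothetical usage sketch; the thin-driver URI below is a made-up example, not one taken from databus:

import java.sql.Connection;
import javax.sql.DataSource;

public class OracleDataSourceUsage {
  // Illustrative caller only; the URI is an assumption for the example.
  public static Connection openConnection() throws Exception {
    DataSource ds = OracleJarUtils.createOracleDataSource("jdbc:oracle:thin:@localhost:1521:ORCL");
    return ds.getConnection();
  }
}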
Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class TestRegisterRequestProcessor, method testDatabusExceptionInGetSchemas.
private void testDatabusExceptionInGetSchemas(final int protoVersion) throws Exception {
  LOG.info("Testing DatabusException in getSchemas() call with protocol version " + protoVersion);
  Properties params = new Properties();
  final int srcId1 = 101;
  final String srcName1 = "source-101";
  final String docSchema1 = "docSchema1";
  final String docSchema2 = "docSchema2";
  final short docSchemaV1 = 1;
  final short docSchemaV2 = 2;
  if (protoVersion != 0) {
    params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
  }
  params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
  final StringBuilder responseStr = new StringBuilder();
  ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
  chunkedWritableByteChannel.addMetadata(EasyMock.anyObject(String.class), EasyMock.anyInt());
  EasyMock.expectLastCall().times(1);
  chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
  EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {
    @Override
    public Object answer() throws Throwable {
      // Decode whatever the processor writes so the test can inspect it as a string.
      Charset charset = Charset.forName("UTF-8");
      CharsetDecoder decoder = charset.newDecoder();
      responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
      return responseStr.length();
    }
  });
  EasyMock.replay(chunkedWritableByteChannel);
  DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
  EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
  EasyMock.replay(mockReq);
  LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
  SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
  EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
  EasyMock.replay(mockSrcIdReg);
  Map<Short, String> srcSchemaVersions = new HashMap<Short, String>();
  srcSchemaVersions.put(docSchemaV1, docSchema1);
  srcSchemaVersions.put(docSchemaV2, docSchema2);
  DatabusException expectedCause = new DatabusException("FakeException");
  SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
  EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andThrow(expectedCause);
  EasyMock.replay(mockSchemaReg);
  HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
  EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
  EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
  EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
  EasyMock.replay(mockRelay);
  RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
  boolean exceptionCaught = false;
  try {
    reqProcessor.process(mockReq);
  } catch (RequestProcessingException e) {
    Assert.assertEquals(expectedCause, e.getCause());
    exceptionCaught = true;
  }
  // Fail if process() returned normally instead of wrapping the DatabusException.
  Assert.assertTrue(exceptionCaught);
  EasyMock.verify(mockRelay);
  EasyMock.verify(mockReq);
  EasyMock.verify(mockSchemaReg);
  EasyMock.verify(mockSrcIdReg);
}
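The test follows EasyMock's record, replay, verify lifecycle: expectations are recorded on the mock, replay() switches it into checking mode, the code under test exercises it, and verify() asserts every expectation was met. A minimal, self-contained sketch of that cycle; the Lookup interface is hypothetical, standing in for the databus services mocked above:

import org.easymock.EasyMock;

public class EasyMockLifecycleSketch {
  // Hypothetical collaborator, only to illustrate the lifecycle.
  interface Lookup {
    String nameFor(int id);
  }

  public static void main(String[] args) {
    Lookup mock = EasyMock.createMock(Lookup.class);
    EasyMock.expect(mock.nameFor(101)).andReturn("source-101").anyTimes(); // record
    EasyMock.replay(mock);                                                 // switch to replay mode
    System.out.println(mock.nameFor(101));                                 // exercise: prints source-101
    EasyMock.verify(mock);                                                 // assert all expectations were met
  }
}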
Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class GoldenGateEventProducer, method registerParserMbean.
private void registerParserMbean(GGParserStatisticsMBean parserBean) throws DatabusException {
  try {
    Hashtable<String, String> props = new Hashtable<String, String>();
    props.put("type", "GGParserStatistics");
    props.put("name", _pConfig.getName());
    ObjectName objectName = new ObjectName(ServerContainer.JMX_DOMAIN, props);
    if (_mbeanServer.isRegistered(objectName)) {
      _log.warn("Unregistering old ggparser statistics mbean: " + objectName);
      _mbeanServer.unregisterMBean(objectName);
    }
    _mbeanServer.registerMBean(parserBean, objectName);
    _log.info("Registered gg-parser statistics mbean: " + objectName);
    _registeredMbeans.add(objectName);
  } catch (Exception ex) {
    _log.error("Failed to register the GGparser statistics mbean for db = " + _pConfig.getName() + " due to an exception.", ex);
    throw new DatabusException("Failed to initialize GGparser statistics mbean.", ex);
  }
}
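Every successfully registered name is remembered in _registeredMbeans, which lets shutdown unregister them all in a loop. The method below is an assumed illustration of that counterpart, not code taken from databus; it reuses the class's _log and _mbeanServer fields:

// Assumed shutdown counterpart (illustrative, not from the databus source):
// unregister everything that registerParserMbean recorded.
private void unregisterMbeans() {
  for (ObjectName name : _registeredMbeans) {
    try {
      if (_mbeanServer.isRegistered(name)) {
        _mbeanServer.unregisterMBean(name);
      }
    } catch (Exception ex) {
      // Log and continue so one bad mbean does not block the rest.
      _log.warn("Failed to unregister mbean: " + name, ex);
    }
  }
  _registeredMbeans.clear();
}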
Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class GoldenGateEventProducer, method start.
/**
 * Starts the event producer from the given SCN.
 *
 * @param sinceSCN the SCN to start from; a non-positive value means the last
 *                 checkpointed SCN is used (or the latest SCN if no checkpoint exists)
 */
@Override
public synchronized void start(long sinceSCN) {
  _log.info("Start of GoldenGate event producer requested.");
  if (_currentState == State.RUNNING) {
    _log.error("Thread already running!");
    return;
  }
  _scn.set(TrailFilePositionSetter.USE_LATEST_SCN);
  if (sinceSCN > 0) {
    _scn.set(sinceSCN);
  } else {
    if (getMaxScnReaderWriter() != null) {
      try {
        long scn = getMaxScnReaderWriter().getMaxScn();
        // If the max scn is greater than 0, honor it.
        if (scn > 0) {
          // Apply the restart SCN offset.
          long newScn = (scn >= _pConfig.getRestartScnOffset()) ? scn - _pConfig.getRestartScnOffset() : 0;
          _log.info("Checkpoint read = " + scn + " restartScnOffset = " + _pConfig.getRestartScnOffset() + " Adjusted SCN = " + newScn);
          if (newScn > 0) {
            _scn.set(newScn);
          }
        } else {
          // A max scn < 0 is a special case used to tell the trail-file position setter
          // to override the default behaviour of starting with the latest scn.
          _log.info("Overriding default behaviour (start with latest scn); using scn " + scn + " to start the relay");
          if (scn != TrailFilePositionSetter.USE_EARLIEST_SCN && scn != TrailFilePositionSetter.USE_LATEST_SCN) {
            throw new DatabusException("The scn passed is neither the EARLIEST nor the LATEST setting; cannot proceed with this scn");
          }
          _scn.set(scn);
        }
      } catch (DatabusException e) {
        _log.warn("Could not read saved maxScn. Defaulting to startSCN = " + _scn.get());
      }
    }
  }
  if (_worker == null) {
    _log.info("Starting with scn = " + _scn.get());
    _worker = new WorkerThread();
    _worker.setDaemon(true);
    _worker.start();
  }
}
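The restart SCN offset deliberately rewinds the checkpoint so events near the checkpoint boundary are reprocessed rather than missed after a restart, clamping at 0 when the offset exceeds the checkpoint. A small self-contained illustration of that arithmetic; the SCN values are made up:

// Illustration of the restart-offset arithmetic in start(); values are made up.
public class RestartScnExample {
  static long adjust(long checkpointScn, long restartScnOffset) {
    // Rewind by the offset, clamping at 0 just as the producer does.
    return (checkpointScn >= restartScnOffset) ? checkpointScn - restartScnOffset : 0;
  }

  public static void main(String[] args) {
    System.out.println(adjust(10500, 500)); // 10000: restart 500 SCNs before the checkpoint
    System.out.println(adjust(300, 500));   // 0: offset larger than the checkpoint clamps to 0
  }
}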