use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
the class ColumnsState method constructPkeys.
/**
 * Constructs the primary key pair and stores it in the current state.
 * @param eventFields Map containing dbFieldName => fieldValue
 * @param pkFieldName The name of the primary key field (comma-separated list); e.g., if the schema has pk=id, member_id, then pkFieldName is "id, member_id".
 * This is used only for logging purposes.
 * @param pk The primary key object stored as a class object
 * @param field The current field being processed (Avro)
 * @param databaseFieldName Field being processed (Oracle name)
 * @param fieldValueObj Value of the current field being processed
 * @throws DatabusException
 */
private void constructPkeys(HashMap<String, ColumnState.EventField> eventFields, String pkFieldName, PrimaryKey pk, Schema.Field field, String databaseFieldName, Object fieldValueObj) throws DatabusException {
  if (eventFields.get(databaseFieldName).isKey()) {
    if (!pk.isPartOfPrimaryKey(field))
      throw new DatabusException("The primary key is not as expected. Expected: " + pkFieldName + " found from xml: " + field.name());
    if (fieldValueObj == null)
      throw new DatabusException("Unable to find the value of the object");
    Schema.Type pkFieldType = SchemaHelper.unwindUnionSchema(field).getType();
    KeyPair pair = new KeyPair(fieldValueObj, pkFieldType);
    _keyPairs.add(pair);
  }
}
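For orientation, constructPkeys is a private helper that is expected to be invoked once per Avro field while the record schema is walked. The sketch below is illustrative only and makes two assumptions not confirmed by this snippet: that the "dbFieldName" meta attribute carries the Oracle column name, and that ColumnState.EventField exposes a getVal() accessor.
// Hypothetical caller sketch (assumptions noted above; not the actual databus code):
for (Schema.Field field : avroSchema.getFields()) {
  String databaseFieldName = SchemaHelper.getMetaField(field, "dbFieldName"); // assumed meta key
  ColumnState.EventField eventField = eventFields.get(databaseFieldName);
  if (eventField != null) {
    constructPkeys(eventFields, pkFieldName, pk, field, databaseFieldName, eventField.getVal()); // getVal() is assumed
  }
}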
use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
the class OracleEventProducerFactory method buildEventProducer.
public EventProducer buildEventProducer(PhysicalSourceStaticConfig physicalSourceConfig, SchemaRegistryService schemaRegistryService, DbusEventBufferAppendable dbusEventBuffer, MBeanServer mbeanServer, DbusEventsStatisticsCollector dbusEventsStatisticsCollector, MaxSCNReaderWriter _maxScnReaderWriter) throws DatabusException, EventCreationException, UnsupportedKeyException, SQLException, InvalidConfigException {
  // Make sure the URI from the configuration file identifies an Oracle JDBC source.
  String uri = physicalSourceConfig.getUri();
  if (!uri.startsWith("jdbc:oracle")) {
    throw new InvalidConfigException("Invalid source URI (" + physicalSourceConfig.getUri() + "). Only jdbc:oracle: URIs are supported.");
  }
  // Parse each one of the logical sources
  List<OracleTriggerMonitoredSourceInfo> sources = new ArrayList<OracleTriggerMonitoredSourceInfo>();
  for (LogicalSourceStaticConfig sourceConfig : physicalSourceConfig.getSources()) {
    OracleTriggerMonitoredSourceInfo source = buildOracleMonitoredSourceInfo(sourceConfig, physicalSourceConfig, schemaRegistryService);
    sources.add(source);
  }
  DataSource ds = null;
  try {
    ds = OracleJarUtils.createOracleDataSource(uri);
  } catch (Exception e) {
    String errMsg = "Oracle URI likely not supported. Trouble creating OracleDataSource";
    _log.error(errMsg);
    throw new InvalidConfigException(errMsg + e.getMessage());
  }
  // Create the event producer
  EventProducer eventProducer = new OracleEventProducer(sources, ds, dbusEventBuffer, true, dbusEventsStatisticsCollector, _maxScnReaderWriter, physicalSourceConfig, ManagementFactory.getPlatformMBeanServer());
  _log.info("Created OracleEventProducer for config: " + physicalSourceConfig + " with slowSourceQueryThreshold = " + physicalSourceConfig.getSlowSourceQueryThreshold());
  return eventProducer;
}
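The relay wires this factory up in DatabusRelayMain.addOneProducer (shown further down); a trimmed sketch of that call site follows, with the local variables assumed to come from the relay's configuration and container context.
// Call-site sketch (mirrors DatabusRelayMain.addOneProducer below); all locals are assumed
// to be supplied by the relay: parsed config, schema registry, per-partition buffer,
// platform MBean server, inbound stats collector, and the SCN reader/writer.
EventProducer producer = new OracleEventProducerFactory().buildEventProducer(
  physicalSourceStaticConfig,
  schemaRegistryService,
  dbusEventBufferAppendable,
  ManagementFactory.getPlatformMBeanServer(),
  dbusEventsStatisticsCollector,
  maxScnReaderWriter);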
use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
the class OracleEventProducerFactory method buildOracleMonitoredSourceInfo.
public OracleTriggerMonitoredSourceInfo buildOracleMonitoredSourceInfo(LogicalSourceStaticConfig sourceConfig, PhysicalSourceStaticConfig pConfig, SchemaRegistryService schemaRegistryService) throws DatabusException, EventCreationException, UnsupportedKeyException, InvalidConfigException {
  String schema = null;
  try {
    schema = schemaRegistryService.fetchLatestSchemaBySourceName(sourceConfig.getName());
  } catch (NoSuchSchemaException e) {
    throw new InvalidConfigException("Unable to load the schema for source (" + sourceConfig.getName() + ").");
  }
  if (schema == null) {
    throw new InvalidConfigException("Unable to load the schema for source (" + sourceConfig.getName() + ").");
  }
  _log.info("Loading schema for source id " + sourceConfig.getId() + ": " + schema);
  String eventViewSchema;
  String eventView;
  if (sourceConfig.getUri().indexOf('.') != -1) {
    String[] parts = sourceConfig.getUri().split("\\.");
    eventViewSchema = parts[0];
    eventView = parts[1];
  } else {
    eventViewSchema = null;
    eventView = sourceConfig.getUri();
  }
  if (eventView.toLowerCase().startsWith("sy$")) {
    eventView = eventView.substring(3);
  }
  PartitionFunction partitionFunction = buildPartitionFunction(sourceConfig);
  EventFactory factory = createEventFactory(eventViewSchema, eventView, sourceConfig, pConfig, schema, partitionFunction);
  EventSourceStatistics statisticsBean = new EventSourceStatistics(sourceConfig.getName());
  OracleTriggerMonitoredSourceInfo sourceInfo = new OracleTriggerMonitoredSourceInfo(sourceConfig.getId(), sourceConfig.getName(), eventViewSchema, eventView, factory, statisticsBean, sourceConfig.getRegularQueryHints(), sourceConfig.getChunkedTxnQueryHints(), sourceConfig.getChunkedScnQueryHints(), sourceConfig.isSkipInfinityScn());
  return sourceInfo;
}
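The eventViewSchema/eventView handling above is easiest to follow with a concrete value; the URI below is made up for illustration.
// Hypothetical trace of the URI parsing above (the URI value is invented):
//   sourceConfig.getUri() = "MEMBERDB.SY$MEMBER_PROFILE"
//     -> eventViewSchema = "MEMBERDB"
//     -> eventView       = "MEMBER_PROFILE"   (the leading "sy$" is stripped case-insensitively)
//   A URI without a '.' such as "MEMBER_PROFILE" yields eventViewSchema = null.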
use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
the class DatabusRelayTestUtil method createDatabusRelay.
public static DatabusRelayMain createDatabusRelay(PhysicalSourceConfig[] sourceConfigs, HttpRelay.Config httpRelayConfig) throws IOException, DatabusException {
  PhysicalSourceStaticConfig[] pStaticConfigs = new PhysicalSourceStaticConfig[sourceConfigs.length];
  int i = 0;
  for (PhysicalSourceConfig pConf : sourceConfigs) {
    // prefixed to the id or what?
    for (LogicalSourceConfig lsc : pConf.getSources()) {
      httpRelayConfig.setSourceName("" + lsc.getId(), lsc.getName());
    }
    pStaticConfigs[i++] = pConf.build();
  }
  DatabusRelayMain relayMain = new DatabusRelayMain(httpRelayConfig.build(), pStaticConfigs);
  return relayMain;
}
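A minimal sketch of how a test might use this helper, assuming the configs have already been populated elsewhere and that the relay's lifecycle methods are inherited from the server container; the variable names are illustrative.
// Hypothetical test wiring (configs assumed to be populated elsewhere):
HttpRelay.Config httpRelayConfig = new HttpRelay.Config();
PhysicalSourceConfig[] sourceConfigs = new PhysicalSourceConfig[] { oraclePhysicalSourceConfig };
DatabusRelayMain relay = DatabusRelayTestUtil.createDatabusRelay(sourceConfigs, httpRelayConfig);
relay.start();     // lifecycle methods assumed to be inherited from the server container
// ... exercise the relay ...
relay.shutdown();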
use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
the class DatabusRelayMain method addOneProducer.
/** overrides HTTP relay method */
@Override
public void addOneProducer(PhysicalSourceStaticConfig pConfig) throws DatabusException, EventCreationException, UnsupportedKeyException, SQLException, InvalidConfigException {
  // Register a command to allow start/stop/status of the relay
  List<EventProducer> plist = new ArrayList<EventProducer>();
  PhysicalPartition pPartition = pConfig.getPhysicalPartition();
  MaxSCNReaderWriter maxScnReaderWriters = _maxScnReaderWriters.getOrCreateHandler(pPartition);
  LOG.info("Starting server container with maxScnReaderWriter:" + maxScnReaderWriters);
  // Get the event buffer
  DbusEventBufferAppendable dbusEventBuffer = getEventBuffer().getDbusEventBufferAppendable(pPartition);
  // Get the schema registry service
  SchemaRegistryService schemaRegistryService = getSchemaRegistryService();
  // Get a stats collector per physical source
  addPhysicalPartitionCollectors(pPartition);
  String statsCollectorName = pPartition.toSimpleString();
  /*
   * _inBoundStatsCollectors.addStatsCollector(statsCollectorName, new
   * DbusEventsStatisticsCollector(getContainerStaticConfig().getId(),
   * statsCollectorName+".inbound", true, false, getMbeanServer()));
   *
   * _outBoundStatsCollectors.addStatsCollector(statsCollectorName, new
   * DbusEventsStatisticsCollector(getContainerStaticConfig().getId(),
   * statsCollectorName+".outbound", true, false, getMbeanServer()));
   */
  // Create the event producer
  String uri = pConfig.getUri();
  if (uri == null)
    throw new DatabusException("Uri is required to start the relay");
  uri = uri.trim();
  EventProducer producer = null;
  if (uri.startsWith("jdbc:")) {
    SourceType sourceType = pConfig.getReplBitSetter().getSourceType();
    if (SourceType.TOKEN.equals(sourceType))
      throw new DatabusException("Token Source-type for Replication bit setter config cannot be set for trigger-based Databus relay !!");
    // if a buffer for this partition exists - we are overwriting it.
    producer = new OracleEventProducerFactory().buildEventProducer(pConfig, schemaRegistryService, dbusEventBuffer, getMbeanServer(), _inBoundStatsCollectors.getStatsCollector(statsCollectorName), maxScnReaderWriters);
  } else if (uri.startsWith("mock")) {
    // Get all relevant pConfig attributes
    //TODO add real instantiation
    EventProducerServiceProvider mockProvider = _producersRegistry.getEventProducerServiceProvider("mock");
    if (null == mockProvider) {
      throw new DatabusRuntimeException("relay event producer not available: " + "mock");
    }
    producer = mockProvider.createProducer(pConfig, schemaRegistryService, dbusEventBuffer, _inBoundStatsCollectors.getStatsCollector(statsCollectorName), maxScnReaderWriters);
  } else if (uri.startsWith("gg:")) {
    producer = new GoldenGateEventProducer(pConfig, schemaRegistryService, dbusEventBuffer, _inBoundStatsCollectors.getStatsCollector(statsCollectorName), maxScnReaderWriters);
  } else if (uri.startsWith("mysql:")) {
    LOG.info("Adding OpenReplicatorEventProducer for uri :" + uri);
    final String serviceName = "or";
    EventProducerServiceProvider orProvider = _producersRegistry.getEventProducerServiceProvider(serviceName);
    if (null == orProvider) {
      throw new DatabusRuntimeException("relay event producer not available: " + serviceName);
    }
    producer = orProvider.createProducer(pConfig, schemaRegistryService, dbusEventBuffer, _inBoundStatsCollectors.getStatsCollector(statsCollectorName), maxScnReaderWriters);
  } else {
    // Get all relevant pConfig attributes and initialize the nettyThreadPool objects
    RelayEventProducer.DatabusClientNettyThreadPools nettyThreadPools = new RelayEventProducer.DatabusClientNettyThreadPools(0, getNetworkTimeoutTimer(), getBossExecutorService(), getIoExecutorService(), getHttpChannelGroup());
    producer = new RelayEventProducer(pConfig, dbusEventBuffer, _inBoundStatsCollectors.getStatsCollector(statsCollectorName), maxScnReaderWriters, nettyThreadPools);
  }
  // if a buffer for this partition exists - we are overwriting it.
  _producers.put(pPartition, producer);
  plist.add(producer);
  // append 'monitoring event producer'
  if (producer instanceof OracleEventProducer) {
    MonitoringEventProducer monitoringProducer = new MonitoringEventProducer("dbMonitor." + pPartition.toSimpleString(), pConfig.getName(), pConfig.getUri(), ((OracleEventProducer) producer).getMonitoredSourceInfos(), getMbeanServer());
    _monitoringProducers.put(pPartition, monitoringProducer);
    plist.add(monitoringProducer);
  }
  if (_csEventRequestProcessor == null)
    _csEventRequestProcessor = new ControlSourceEventsRequestProcessor(null, this, plist);
  else
    _csEventRequestProcessor.addEventProducers(plist);
  RequestProcessorRegistry processorRegistry = getProcessorRegistry();
  processorRegistry.reregister(ControlSourceEventsRequestProcessor.COMMAND_NAME, _csEventRequestProcessor);
}
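The branch taken depends only on the URI prefix: jdbc: builds the trigger-based OracleEventProducer, mock and mysql: resolve a producer through the registered EventProducerServiceProvider, gg: creates a GoldenGateEventProducer, and anything else falls back to a chained RelayEventProducer. A minimal driver sketch is below; the loop and variable names are illustrative, not the relay's actual initialization code.
// Hypothetical driver sketch: register one producer per configured physical source.
for (PhysicalSourceStaticConfig pConfig : physicalSourceStaticConfigs) {
  relay.addOneProducer(pConfig);
}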