Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.
The class RegisterRequestProcessor, method process:
@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException {
  try {
    // fail early if optional version param is included but isn't valid
    // 2 and 3 are same for us; 4 is a superset only newer clients understand
    int registerRequestProtocolVersion = 3;
    String registerRequestProtocolVersionStr = request.getParams().getProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM);
    if (registerRequestProtocolVersionStr != null) {
      try {
        registerRequestProtocolVersion = Integer.parseInt(registerRequestProtocolVersionStr);
      } catch (NumberFormatException e) {
        LOG.error("Could not parse /register request protocol version: " + registerRequestProtocolVersionStr);
        throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, registerRequestProtocolVersionStr);
      }
      if (registerRequestProtocolVersion < 2 || registerRequestProtocolVersion > 4) {
        LOG.error("Out-of-range /register request protocol version: " + registerRequestProtocolVersionStr);
        throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, registerRequestProtocolVersionStr);
      }
    }
    Collection<LogicalSource> logicalSources = null;
    HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
    String sources = request.getParams().getProperty(SOURCES_PARAM);
    if (null == sources) {
      // need to return all schemas, so first get all sources
      logicalSources = _relay.getSourcesIdNameRegistry().getAllSources();
    } else {
      String[] sourceIds = sources.split(",");
      logicalSources = new ArrayList<LogicalSource>(sourceIds.length);
      for (String sourceId : sourceIds) {
        int srcId;
        String trimmedSourceId = sourceId.trim();
        try {
          srcId = Integer.valueOf(trimmedSourceId);
          LogicalSource lsource = _relay.getSourcesIdNameRegistry().getSource(srcId);
          if (null != lsource) {
            logicalSources.add(lsource);
          } else {
            LOG.error("No source name for source id: " + srcId);
            throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
          }
        } catch (NumberFormatException nfe) {
          if (relayStatsCollector != null) {
            relayStatsCollector.registerInvalidRegisterCall();
          }
          throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
        }
      }
    }
    SchemaRegistryService schemaRegistry = _relay.getSchemaRegistryService();
    ArrayList<RegisterResponseEntry> registeredSources = new ArrayList<RegisterResponseEntry>(20);
    for (LogicalSource lsource : logicalSources) {
      getSchemas(schemaRegistry, lsource.getName(), lsource.getId(), sources, registeredSources);
    }
    // Note that, as of April 2013, the Espresso sandbox's schema registry
    // (in JSON format) is 4.5 MB and growing. But 100 KB is probably OK
    // for regular production cases.
    StringWriter out = new StringWriter(102400);
    ObjectMapper mapper = new ObjectMapper();
    // any circumstances under which we might want to override this?
    int registerResponseProtocolVersion = registerRequestProtocolVersion;
    if (registerRequestProtocolVersion == 4) {  // DDSDBUS-2009
      LOG.debug("Got version 4 /register request; fetching metadata schema.");
      // Get (replication) metadata schema from registry; format it as list
      // of schemas (multiple only if more than one version exists). Per
      // https://iwww.corp.linkedin.com/wiki/cf/display/ENGS/Espresso+Metadata+Schema,
      // name of replication metadata is simply "metadata".
      ArrayList<RegisterResponseMetadataEntry> registeredMetadata = new ArrayList<RegisterResponseMetadataEntry>(2);
      getMetadataSchemas(schemaRegistry, registeredMetadata);
      // Set up the v4 response as a map: one entry is the existing list of source
      // schemas, and the others (if present) are the new lists of metadata schema(s)
      // and (TODO) key schemas.
      HashMap<String, List<Object>> responseMap = new HashMap<String, List<Object>>(4);
      responseMap.put(RegisterResponseEntry.SOURCE_SCHEMAS_KEY, (List<Object>) (List<?>) registeredSources);
      if (registeredMetadata.size() > 0) {
        LOG.debug("Sending v4 /register response with metadata schema.");
        responseMap.put(RegisterResponseMetadataEntry.METADATA_SCHEMAS_KEY, (List<Object>) (List<?>) registeredMetadata);
      } else {
        LOG.debug("No metadata schema available; sending v4 /register response without.");
      }
      // TODO: figure out how to retrieve key schemas and include via RegisterResponseEntry.KEY_SCHEMAS_KEY
      mapper.writeValue(out, responseMap);
    } else {
      // fall back to old style (v2/v3 response)
      mapper.writeValue(out, registeredSources);
    }
    ChunkedWritableByteChannel responseContent = request.getResponseContent();
    byte[] resultBytes = out.toString().getBytes(Charset.defaultCharset());
    responseContent.addMetadata(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR, registerResponseProtocolVersion);
    responseContent.write(ByteBuffer.wrap(resultBytes));
    if (null != relayStatsCollector) {
      HttpStatisticsCollector connStatsCollector = (HttpStatisticsCollector) request.getParams().get(relayStatsCollector.getName());
      if (null != connStatsCollector) {
        connStatsCollector.registerRegisterCall(registeredSources);
      } else {
        relayStatsCollector.registerRegisterCall(registeredSources);
      }
    }
    return request;
  } catch (InvalidRequestParamValueException e) {
    HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
    if (null != relayStatsCollector) {
      relayStatsCollector.registerInvalidRegisterCall();
    }
    throw e;
  }
}
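For readers comparing client versions, here is a minimal, self-contained sketch of the two response shapes the version check selects between: v2/v3 clients receive a bare JSON array of source schemas, while v4 clients receive a map that wraps that array and may also carry metadata schemas. It reuses the ObjectMapper/StringWriter pattern from the method above; the org.codehaus.jackson import is an assumption about the relay's Jackson version, the entries are simplified to plain maps, and the literal keys "sourceSchemas" and "metadataSchemas" are stand-ins for RegisterResponseEntry.SOURCE_SCHEMAS_KEY and RegisterResponseMetadataEntry.METADATA_SCHEMAS_KEY.

import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.codehaus.jackson.map.ObjectMapper;  // assumed Jackson 1.x, as used elsewhere in the relay

public class RegisterResponseShapeSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();

    // One source-schema entry, simplified to a plain map instead of RegisterResponseEntry.
    Map<String, Object> entry = new LinkedHashMap<String, Object>();
    entry.put("id", 101L);
    entry.put("version", 2);
    entry.put("schema", "{\"type\":\"record\",\"name\":\"Source101\",\"fields\":[]}");
    List<Object> sourceSchemas = new ArrayList<Object>();
    sourceSchemas.add(entry);

    // v2/v3 response: the list itself is the payload.
    StringWriter v2v3 = new StringWriter();
    mapper.writeValue(v2v3, sourceSchemas);

    // v4 response: a map keyed by schema category; the metadata list is added only when present.
    Map<String, List<Object>> v4 = new HashMap<String, List<Object>>();
    v4.put("sourceSchemas", sourceSchemas);              // assumed value of RegisterResponseEntry.SOURCE_SCHEMAS_KEY
    v4.put("metadataSchemas", new ArrayList<Object>());  // assumed value of RegisterResponseMetadataEntry.METADATA_SCHEMAS_KEY
    StringWriter v4Out = new StringWriter();
    mapper.writeValue(v4Out, v4);

    System.out.println("v2/v3 body: " + v2v3);
    System.out.println("v4 body:    " + v4Out);
  }
}

The DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR metadata written just before the payload is how a client can tell which of these two shapes it received.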
Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.
The class OracleEventProducerFactory, method buildEventProducer:
public EventProducer buildEventProducer(PhysicalSourceStaticConfig physicalSourceConfig, SchemaRegistryService schemaRegistryService, DbusEventBufferAppendable dbusEventBuffer, MBeanServer mbeanServer, DbusEventsStatisticsCollector dbusEventsStatisticsCollector, MaxSCNReaderWriter _maxScnReaderWriter) throws DatabusException, EventCreationException, UnsupportedKeyException, SQLException, InvalidConfigException {
  // Make sure the URI from the configuration file identifies an Oracle JDBC source.
  String uri = physicalSourceConfig.getUri();
  if (!uri.startsWith("jdbc:oracle")) {
    throw new InvalidConfigException("Invalid source URI (" + physicalSourceConfig.getUri() + "). Only jdbc:oracle: URIs are supported.");
  }
  // Parse each one of the logical sources
  List<OracleTriggerMonitoredSourceInfo> sources = new ArrayList<OracleTriggerMonitoredSourceInfo>();
  for (LogicalSourceStaticConfig sourceConfig : physicalSourceConfig.getSources()) {
    OracleTriggerMonitoredSourceInfo source = buildOracleMonitoredSourceInfo(sourceConfig, physicalSourceConfig, schemaRegistryService);
    sources.add(source);
  }
  DataSource ds = null;
  try {
    ds = OracleJarUtils.createOracleDataSource(uri);
  } catch (Exception e) {
    String errMsg = "Oracle URI likely not supported. Trouble creating OracleDataSource";
    _log.error(errMsg);
    throw new InvalidConfigException(errMsg + e.getMessage());
  }
  // Create the event producer
  EventProducer eventProducer = new OracleEventProducer(sources, ds, dbusEventBuffer, true, dbusEventsStatisticsCollector, _maxScnReaderWriter, physicalSourceConfig, ManagementFactory.getPlatformMBeanServer());
  _log.info("Created OracleEventProducer for config: " + physicalSourceConfig + " with slowSourceQueryThreshold = " + physicalSourceConfig.getSlowSourceQueryThreshold());
  return eventProducer;
}
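OracleJarUtils.createOracleDataSource is what keeps a hard compile-time dependency on the Oracle driver out of this factory. The sketch below is not the actual databus helper but a minimal illustration of the usual reflection pattern for such a method, assuming oracle.jdbc.pool.OracleDataSource is available on the runtime classpath; the class and example URI are only for demonstration.

import java.lang.reflect.Method;
import javax.sql.DataSource;

public final class OracleDataSourceSketch {
  // Hypothetical helper: loads oracle.jdbc.pool.OracleDataSource reflectively so that
  // callers compile without the proprietary Oracle driver on the build classpath.
  public static DataSource createOracleDataSource(String uri) throws Exception {
    Class<?> odsClass = Class.forName("oracle.jdbc.pool.OracleDataSource");
    Object ods = odsClass.newInstance();
    Method setUrl = odsClass.getMethod("setURL", String.class);
    setUrl.invoke(ods, uri);
    return (DataSource) ods;
  }

  public static void main(String[] args) throws Exception {
    DataSource ds = createOracleDataSource("jdbc:oracle:thin:user/password@localhost:1521:ORCL");
    System.out.println("DataSource class: " + ds.getClass().getName());
  }
}

Loading the class by name means a missing driver jar surfaces only at runtime, which is why the catch block above wraps any failure in an InvalidConfigException rather than letting it propagate raw.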
Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.
The class OracleEventProducerFactory, method buildOracleMonitoredSourceInfo:
public OracleTriggerMonitoredSourceInfo buildOracleMonitoredSourceInfo(LogicalSourceStaticConfig sourceConfig, PhysicalSourceStaticConfig pConfig, SchemaRegistryService schemaRegistryService) throws DatabusException, EventCreationException, UnsupportedKeyException, InvalidConfigException {
  String schema = null;
  try {
    schema = schemaRegistryService.fetchLatestSchemaBySourceName(sourceConfig.getName());
  } catch (NoSuchSchemaException e) {
    throw new InvalidConfigException("Unable to load the schema for source (" + sourceConfig.getName() + ").");
  }
  if (schema == null) {
    throw new InvalidConfigException("Unable to load the schema for source (" + sourceConfig.getName() + ").");
  }
  _log.info("Loading schema for source id " + sourceConfig.getId() + ": " + schema);
  String eventViewSchema;
  String eventView;
  if (sourceConfig.getUri().indexOf('.') != -1) {
    String[] parts = sourceConfig.getUri().split("\\.");
    eventViewSchema = parts[0];
    eventView = parts[1];
  } else {
    eventViewSchema = null;
    eventView = sourceConfig.getUri();
  }
  if (eventView.toLowerCase().startsWith("sy$")) {
    eventView = eventView.substring(3);
  }
  PartitionFunction partitionFunction = buildPartitionFunction(sourceConfig);
  EventFactory factory = createEventFactory(eventViewSchema, eventView, sourceConfig, pConfig, schema, partitionFunction);
  EventSourceStatistics statisticsBean = new EventSourceStatistics(sourceConfig.getName());
  OracleTriggerMonitoredSourceInfo sourceInfo = new OracleTriggerMonitoredSourceInfo(sourceConfig.getId(), sourceConfig.getName(), eventViewSchema, eventView, factory, statisticsBean, sourceConfig.getRegularQueryHints(), sourceConfig.getChunkedTxnQueryHints(), sourceConfig.getChunkedScnQueryHints(), sourceConfig.isSkipInfinityScn());
  return sourceInfo;
}
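The owner/view parsing above is terse, so the stand-alone sketch below restates it with a few made-up URIs to make the two cases visible. The helper name and example values are hypothetical; the rules mirror the code: split an "owner.view" URI on the dot, otherwise treat the whole URI as the view, then strip the "sy$" prefix that databus trigger-generated views conventionally carry.

public class SourceUriParsingSketch {
  // Hypothetical standalone restatement of the eventViewSchema/eventView parsing above.
  static String[] parse(String uri) {
    String owner = null;
    String view = uri;
    if (uri.indexOf('.') != -1) {
      String[] parts = uri.split("\\.");
      owner = parts[0];
      view = parts[1];
    }
    // Strip the conventional "sy$" prefix, case-insensitively, as the factory does.
    if (view.toLowerCase().startsWith("sy$")) {
      view = view.substring(3);
    }
    return new String[] { owner, view };
  }

  public static void main(String[] args) {
    for (String uri : new String[] { "MEMBER2.SY$MEMBER_PROFILE", "sy$liar_job", "plain_table" }) {
      String[] p = parse(uri);
      System.out.println(uri + " -> owner=" + p[0] + ", view=" + p[1]);
    }
  }
}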
Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.
The class TestRegisterRequestProcessor, method testRegisterReqProcessorVx:
// Test of happy path when the protocol version is specified as 2 or 3,
// or not specified at all.
// We should send out the source schemas only, and that too as a list.
private void testRegisterReqProcessorVx(final int protoVersion) throws Exception {
  LOG.info("Verifying happy path with protocol version: " + protoVersion);
  Properties params = new Properties();
  final int srcId1 = 101;
  final String srcName1 = "source-101";
  final String docSchema1 = "docSchema1";
  final String docSchema2 = "docSchema2";
  final short docSchemaV1 = 1;
  final short docSchemaV2 = 2;
  if (protoVersion != 0) {
    params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
  }
  params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
  final StringBuilder responseStr = new StringBuilder();
  ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
  // We should write out proto-version as 3 if none was specified in the input, otherwise match the proto version
  chunkedWritableByteChannel.addMetadata(EasyMock.eq(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR), protoVersion != 0 ? EasyMock.eq(protoVersion) : EasyMock.eq(3));
  EasyMock.expectLastCall().times(1);
  chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
  EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {
    @Override
    public Object answer() throws Throwable {
      Charset charset = Charset.forName("UTF-8");
      CharsetDecoder decoder = charset.newDecoder();
      responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
      return responseStr.length();
    }
  });
  EasyMock.replay(chunkedWritableByteChannel);
  DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
  EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
  EasyMock.expect(mockReq.getResponseContent()).andReturn(chunkedWritableByteChannel);
  EasyMock.replay(mockReq);
  LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
  SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
  EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
  EasyMock.replay(mockSrcIdReg);
  Map<Short, String> srcSchemaVersions = new HashMap<Short, String>();
  srcSchemaVersions.put(docSchemaV1, docSchema1);
  srcSchemaVersions.put(docSchemaV2, docSchema2);
  SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
  EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andReturn(srcSchemaVersions).anyTimes();
  EasyMock.replay(mockSchemaReg);
  HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
  EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
  EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
  EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
  EasyMock.replay(mockRelay);
  RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
  reqProcessor.process(mockReq);
  ObjectMapper mapper = new ObjectMapper();
  List<RegisterResponseEntry> schemasList = mapper.readValue(responseStr.toString(), new TypeReference<List<RegisterResponseEntry>>() {});
  Map<Long, List<RegisterResponseEntry>> sourcesSchemasMap = RegisterResponseEntry.convertSchemaListToMap(schemasList);
  // There should be 1 entry in the map.
  Assert.assertEquals(1, sourcesSchemasMap.size());
  Assert.assertEquals(2, sourcesSchemasMap.get(new Long(srcId1)).size());
  for (RegisterResponseEntry r : sourcesSchemasMap.get(new Long(srcId1))) {
    Assert.assertEquals(srcId1, r.getId());
    if (r.getVersion() == docSchemaV1) {
      Assert.assertEquals(docSchema1, r.getSchema());
    } else {
      Assert.assertEquals(docSchema2, r.getSchema());
    }
  }
  EasyMock.verify(mockRelay);
  EasyMock.verify(mockReq);
  EasyMock.verify(mockSchemaReg);
  EasyMock.verify(mockSrcIdReg);
}
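The final assertions rely on RegisterResponseEntry.convertSchemaListToMap to group the flat response list by source id. A minimal stand-alone sketch of that grouping, with a hypothetical simplified Entry class in place of RegisterResponseEntry, shows why the test expects one map key for source 101 holding both schema versions.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SchemaListGroupingSketch {
  // Hypothetical, simplified entry; the real RegisterResponseEntry carries the same id/version/schema fields.
  static class Entry {
    final long id; final short version; final String schema;
    Entry(long id, short version, String schema) { this.id = id; this.version = version; this.schema = schema; }
  }

  static Map<Long, List<Entry>> groupById(List<Entry> entries) {
    Map<Long, List<Entry>> byId = new HashMap<Long, List<Entry>>();
    for (Entry e : entries) {
      List<Entry> list = byId.get(e.id);
      if (list == null) {
        list = new ArrayList<Entry>();
        byId.put(e.id, list);
      }
      list.add(e);
    }
    return byId;
  }

  public static void main(String[] args) {
    List<Entry> entries = Arrays.asList(
        new Entry(101, (short) 1, "docSchema1"),
        new Entry(101, (short) 2, "docSchema2"));
    Map<Long, List<Entry>> byId = groupById(entries);
    System.out.println("sources: " + byId.size() + ", versions for 101: " + byId.get(101L).size());
  }
}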
Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.
The class TestRegisterRequestProcessor, method testNullSchemasInGetSchemas:
private void testNullSchemasInGetSchemas(final int protoVersion) throws Exception {
  LOG.info("Testing null return from fetchAllSchemaVersionsBySourceName() with protoversion " + protoVersion);
  Properties params = new Properties();
  final int srcId1 = 101;
  final String srcName1 = "source-101";
  if (protoVersion != 0) {
    params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
  }
  params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
  final StringBuilder responseStr = new StringBuilder();
  ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
  // We should write out proto-version as 3 if none was specified in the input, otherwise match the proto version
  chunkedWritableByteChannel.addMetadata(EasyMock.eq(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR), protoVersion != 0 ? EasyMock.eq(protoVersion) : EasyMock.eq(3));
  EasyMock.expectLastCall().times(1);
  chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
  EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {
    @Override
    public Object answer() throws Throwable {
      Charset charset = Charset.forName("UTF-8");
      CharsetDecoder decoder = charset.newDecoder();
      responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
      return responseStr.length();
    }
  });
  EasyMock.replay(chunkedWritableByteChannel);
  DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
  EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
  EasyMock.expect(mockReq.getResponseContent()).andReturn(chunkedWritableByteChannel);
  EasyMock.replay(mockReq);
  LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
  SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
  EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
  EasyMock.replay(mockSrcIdReg);
  SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
  EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andReturn(null);
  EasyMock.replay(mockSchemaReg);
  HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
  EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
  EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
  EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
  EasyMock.replay(mockRelay);
  RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
  reqProcessor.process(mockReq);
  ObjectMapper mapper = new ObjectMapper();
  List<RegisterResponseEntry> schemasList = mapper.readValue(responseStr.toString(), new TypeReference<List<RegisterResponseEntry>>() {});
  Map<Long, List<RegisterResponseEntry>> sourcesSchemasMap = RegisterResponseEntry.convertSchemaListToMap(schemasList);
  // There should be no entries in the map.
  Assert.assertEquals(0, sourcesSchemasMap.size());
  EasyMock.verify(mockRelay);
  EasyMock.verify(mockReq);
  EasyMock.verify(mockSchemaReg);
  EasyMock.verify(mockSrcIdReg);
}
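Both tests use the same EasyMock idiom to capture what the processor writes to the mocked channel: the write() expectation is answered by decoding the ByteBuffer argument into a StringBuilder. Below is a self-contained sketch of that capture pattern, with a hypothetical Sink interface standing in for ChunkedWritableByteChannel.

import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import org.easymock.EasyMock;
import org.easymock.IAnswer;

public class EasyMockCaptureSketch {
  // Hypothetical stand-in for ChunkedWritableByteChannel: write() returns the number of bytes consumed.
  interface Sink {
    int write(ByteBuffer buf);
  }

  public static void main(String[] args) {
    final StringBuilder captured = new StringBuilder();
    Sink sink = EasyMock.createMock(Sink.class);
    EasyMock.expect(sink.write(EasyMock.anyObject(ByteBuffer.class))).andAnswer(new IAnswer<Integer>() {
      @Override
      public Integer answer() throws Throwable {
        ByteBuffer buf = (ByteBuffer) EasyMock.getCurrentArguments()[0];
        int n = buf.remaining();
        // Decode the bytes into the shared buffer so the test can inspect them afterwards.
        captured.append(Charset.forName("UTF-8").decode(buf));
        return n;
      }
    });
    EasyMock.replay(sink);

    // The code under test would normally do the writing; here we call it directly to demonstrate the capture.
    int written = sink.write(ByteBuffer.wrap("[{\"id\":101}]".getBytes(Charset.forName("UTF-8"))));

    EasyMock.verify(sink);
    System.out.println("bytes written: " + written + ", captured: " + captured);
  }
}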