Example 6 with SchemaRegistryService

Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.

From class RegisterRequestProcessor, method process().

@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException {
    try {
        // fail early if optional version param is included but isn't valid
        // 2 and 3 are same for us; 4 is a superset only newer clients understand
        int registerRequestProtocolVersion = 3;
        String registerRequestProtocolVersionStr = request.getParams().getProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM);
        if (registerRequestProtocolVersionStr != null) {
            try {
                registerRequestProtocolVersion = Integer.parseInt(registerRequestProtocolVersionStr);
            } catch (NumberFormatException e) {
                LOG.error("Could not parse /register request protocol version: " + registerRequestProtocolVersionStr);
                throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, registerRequestProtocolVersionStr);
            }
            if (registerRequestProtocolVersion < 2 || registerRequestProtocolVersion > 4) {
                LOG.error("Out-of-range /register request protocol version: " + registerRequestProtocolVersionStr);
                throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, registerRequestProtocolVersionStr);
            }
        }
        Collection<LogicalSource> logicalSources = null;
        HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
        String sources = request.getParams().getProperty(SOURCES_PARAM);
        if (null == sources) {
            // need to return all schemas, so first get all sources
            logicalSources = _relay.getSourcesIdNameRegistry().getAllSources();
        } else {
            String[] sourceIds = sources.split(",");
            logicalSources = new ArrayList<LogicalSource>(sourceIds.length);
            for (String sourceId : sourceIds) {
                int srcId;
                String trimmedSourceId = sourceId.trim();
                try {
                    srcId = Integer.valueOf(trimmedSourceId);
                    LogicalSource lsource = _relay.getSourcesIdNameRegistry().getSource(srcId);
                    if (null != lsource)
                        logicalSources.add(lsource);
                    else {
                        LOG.error("No source name for source id: " + srcId);
                        throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
                    }
                } catch (NumberFormatException nfe) {
                    if (relayStatsCollector != null) {
                        relayStatsCollector.registerInvalidRegisterCall();
                    }
                    throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
                }
            }
        }
        SchemaRegistryService schemaRegistry = _relay.getSchemaRegistryService();
        ArrayList<RegisterResponseEntry> registeredSources = new ArrayList<RegisterResponseEntry>(20);
        for (LogicalSource lsource : logicalSources) {
            getSchemas(schemaRegistry, lsource.getName(), lsource.getId(), sources, registeredSources);
        }
        // Note that, as of April 2013, the Espresso sandbox's schema registry
        // (in JSON format) is 4.5 MB and growing.  But 100 KB is probably OK
        // for regular production cases.
        StringWriter out = new StringWriter(102400);
        ObjectMapper mapper = new ObjectMapper();
        // any circumstances under which we might want to override this?
        int registerResponseProtocolVersion = registerRequestProtocolVersion;
        if (registerRequestProtocolVersion == 4) {  // DDSDBUS-2009
            LOG.debug("Got version 4 /register request; fetching metadata schema.");
            // Get (replication) metadata schema from registry; format it as list
            // of schemas (multiple only if more than one version exists).  Per
            // https://iwww.corp.linkedin.com/wiki/cf/display/ENGS/Espresso+Metadata+Schema,
            // name of replication metadata is simply "metadata".
            ArrayList<RegisterResponseMetadataEntry> registeredMetadata = new ArrayList<RegisterResponseMetadataEntry>(2);
            getMetadataSchemas(schemaRegistry, registeredMetadata);
            // Set up the v4 response as a map:  one entry is the existing list of source
            // schemas, and the others (if present) are the new lists of metadata schema(s)
            // and (TODO) key schemas.
            HashMap<String, List<Object>> responseMap = new HashMap<String, List<Object>>(4);
            responseMap.put(RegisterResponseEntry.SOURCE_SCHEMAS_KEY, (List<Object>) (List<?>) registeredSources);
            if (registeredMetadata.size() > 0) {
                LOG.debug("Sending v4 /register response with metadata schema.");
                responseMap.put(RegisterResponseMetadataEntry.METADATA_SCHEMAS_KEY, (List<Object>) (List<?>) registeredMetadata);
            } else {
                LOG.debug("No metadata schema available; sending v4 /register response without.");
            }
            // TODO:  figure out how to retrieve key schemas and include via RegisterResponseEntry.KEY_SCHEMAS_KEY
            mapper.writeValue(out, responseMap);
        } else {  // fall back to old style (v2/v3 response)
            mapper.writeValue(out, registeredSources);
        }
        ChunkedWritableByteChannel responseContent = request.getResponseContent();
        byte[] resultBytes = out.toString().getBytes(Charset.defaultCharset());
        responseContent.addMetadata(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR, registerResponseProtocolVersion);
        responseContent.write(ByteBuffer.wrap(resultBytes));
        if (null != relayStatsCollector) {
            HttpStatisticsCollector connStatsCollector = (HttpStatisticsCollector) request.getParams().get(relayStatsCollector.getName());
            if (null != connStatsCollector) {
                connStatsCollector.registerRegisterCall(registeredSources);
            } else {
                relayStatsCollector.registerRegisterCall(registeredSources);
            }
        }
        return request;
    } catch (InvalidRequestParamValueException e) {
        HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
        if (null != relayStatsCollector)
            relayStatsCollector.registerInvalidRegisterCall();
        throw e;
    }
}
Also used : ChunkedWritableByteChannel(com.linkedin.databus2.core.container.ChunkedWritableByteChannel) HashMap(java.util.HashMap) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) ArrayList(java.util.ArrayList) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) InvalidRequestParamValueException(com.linkedin.databus2.core.container.request.InvalidRequestParamValueException) StringWriter(java.io.StringWriter) RegisterResponseMetadataEntry(com.linkedin.databus2.core.container.request.RegisterResponseMetadataEntry) HttpStatisticsCollector(com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector) RegisterResponseEntry(com.linkedin.databus2.core.container.request.RegisterResponseEntry) ArrayList(java.util.ArrayList) List(java.util.List) ObjectMapper(org.codehaus.jackson.map.ObjectMapper)
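
For reference, here is a rough, hypothetical sketch of how a client could decode the two response shapes produced above with Jackson: v2/v3 responses are a bare JSON list of source-schema entries, while v4 responses are a JSON map keyed by SOURCE_SCHEMAS_KEY and (optionally) METADATA_SCHEMAS_KEY. The class and method names below are illustrative assumptions, not the actual Databus client code.

import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import com.linkedin.databus2.core.container.request.RegisterResponseEntry;

// Hypothetical helper for decoding a /register response body.
public class RegisterResponseSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // v2/v3: the body is a plain JSON list of RegisterResponseEntry objects.
    static List<RegisterResponseEntry> parseV2V3(String json) throws IOException {
        return MAPPER.readValue(json, new TypeReference<List<RegisterResponseEntry>>() {});
    }

    // v4: the body is a JSON map; values come back as generic maps unless converted further.
    static Map<String, List<Object>> parseV4(String json) throws IOException {
        return MAPPER.readValue(json, new TypeReference<Map<String, List<Object>>>() {});
    }
}

The v2/v3 shape is exactly what the tests further below deserialize with a TypeReference<List<RegisterResponseEntry>>.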

Example 7 with SchemaRegistryService

Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.

From class OracleEventProducerFactory, method buildEventProducer().

public EventProducer buildEventProducer(PhysicalSourceStaticConfig physicalSourceConfig, SchemaRegistryService schemaRegistryService, DbusEventBufferAppendable dbusEventBuffer, MBeanServer mbeanServer, DbusEventsStatisticsCollector dbusEventsStatisticsCollector, MaxSCNReaderWriter _maxScnReaderWriter) throws DatabusException, EventCreationException, UnsupportedKeyException, SQLException, InvalidConfigException {
    // Make sure the URI from the configuration file identifies an Oracle JDBC source.
    String uri = physicalSourceConfig.getUri();
    if (!uri.startsWith("jdbc:oracle")) {
        throw new InvalidConfigException("Invalid source URI (" + physicalSourceConfig.getUri() + "). Only jdbc:oracle: URIs are supported.");
    }
    // Parse each one of the logical sources
    List<OracleTriggerMonitoredSourceInfo> sources = new ArrayList<OracleTriggerMonitoredSourceInfo>();
    for (LogicalSourceStaticConfig sourceConfig : physicalSourceConfig.getSources()) {
        OracleTriggerMonitoredSourceInfo source = buildOracleMonitoredSourceInfo(sourceConfig, physicalSourceConfig, schemaRegistryService);
        sources.add(source);
    }
    DataSource ds = null;
    try {
        ds = OracleJarUtils.createOracleDataSource(uri);
    } catch (Exception e) {
        String errMsg = "Oracle URI likely not supported. Trouble creating OracleDataSource";
        _log.error(errMsg);
        throw new InvalidConfigException(errMsg + e.getMessage());
    }
    // Create the event producer
    EventProducer eventProducer = new OracleEventProducer(sources, ds, dbusEventBuffer, true, dbusEventsStatisticsCollector, _maxScnReaderWriter, physicalSourceConfig, ManagementFactory.getPlatformMBeanServer());
    _log.info("Created OracleEventProducer for config:  " + physicalSourceConfig + " with slowSourceQueryThreshold = " + physicalSourceConfig.getSlowSourceQueryThreshold());
    return eventProducer;
}
Also used : ArrayList(java.util.ArrayList) OracleEventProducer(com.linkedin.databus2.producers.db.OracleEventProducer) EventProducer(com.linkedin.databus2.producers.EventProducer) OracleEventProducer(com.linkedin.databus2.producers.db.OracleEventProducer) LogicalSourceStaticConfig(com.linkedin.databus2.relay.config.LogicalSourceStaticConfig) InvalidConfigException(com.linkedin.databus.core.util.InvalidConfigException) UnsupportedKeyException(com.linkedin.databus.core.UnsupportedKeyException) SQLException(java.sql.SQLException) NoSuchSchemaException(com.linkedin.databus2.schemas.NoSuchSchemaException) EventCreationException(com.linkedin.databus2.producers.EventCreationException) DatabusException(com.linkedin.databus2.core.DatabusException) InvalidConfigException(com.linkedin.databus.core.util.InvalidConfigException) OracleTriggerMonitoredSourceInfo(com.linkedin.databus2.producers.db.OracleTriggerMonitoredSourceInfo) DataSource(javax.sql.DataSource)
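
As an aside, the jdbc:oracle prefix check done inline above could be pulled into a tiny helper; the sketch below is purely illustrative (the class and method names are made up) and simply mirrors the validation in buildEventProducer.

import com.linkedin.databus.core.util.InvalidConfigException;

// Hypothetical helper mirroring the inline URI check above; not part of the factory's API.
final class OracleUriCheck {
    private OracleUriCheck() {
    }

    // Rejects anything that is not an Oracle JDBC URI, matching the factory's behavior.
    static void requireOracleJdbcUri(String uri) throws InvalidConfigException {
        if (uri == null || !uri.startsWith("jdbc:oracle")) {
            throw new InvalidConfigException("Invalid source URI (" + uri + "). Only jdbc:oracle: URIs are supported.");
        }
    }
}

With such a helper, buildEventProducer would call OracleUriCheck.requireOracleJdbcUri(physicalSourceConfig.getUri()) before parsing the logical sources.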

Example 8 with SchemaRegistryService

Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.

From class OracleEventProducerFactory, method buildOracleMonitoredSourceInfo().

public OracleTriggerMonitoredSourceInfo buildOracleMonitoredSourceInfo(LogicalSourceStaticConfig sourceConfig, PhysicalSourceStaticConfig pConfig, SchemaRegistryService schemaRegistryService) throws DatabusException, EventCreationException, UnsupportedKeyException, InvalidConfigException {
    String schema = null;
    try {
        schema = schemaRegistryService.fetchLatestSchemaBySourceName(sourceConfig.getName());
    } catch (NoSuchSchemaException e) {
        throw new InvalidConfigException("Unable to load the schema for source (" + sourceConfig.getName() + ").");
    }
    if (schema == null) {
        throw new InvalidConfigException("Unable to load the schema for source (" + sourceConfig.getName() + ").");
    }
    _log.info("Loading schema for source id " + sourceConfig.getId() + ": " + schema);
    String eventViewSchema;
    String eventView;
    if (sourceConfig.getUri().indexOf('.') != -1) {
        String[] parts = sourceConfig.getUri().split("\\.");
        eventViewSchema = parts[0];
        eventView = parts[1];
    } else {
        eventViewSchema = null;
        eventView = sourceConfig.getUri();
    }
    if (eventView.toLowerCase().startsWith("sy$")) {
        eventView = eventView.substring(3);
    }
    PartitionFunction partitionFunction = buildPartitionFunction(sourceConfig);
    EventFactory factory = createEventFactory(eventViewSchema, eventView, sourceConfig, pConfig, schema, partitionFunction);
    EventSourceStatistics statisticsBean = new EventSourceStatistics(sourceConfig.getName());
    OracleTriggerMonitoredSourceInfo sourceInfo = new OracleTriggerMonitoredSourceInfo(sourceConfig.getId(), sourceConfig.getName(), eventViewSchema, eventView, factory, statisticsBean, sourceConfig.getRegularQueryHints(), sourceConfig.getChunkedTxnQueryHints(), sourceConfig.getChunkedScnQueryHints(), sourceConfig.isSkipInfinityScn());
    return sourceInfo;
}
Also used : ConstantPartitionFunction(com.linkedin.databus2.producers.ConstantPartitionFunction) PartitionFunction(com.linkedin.databus2.producers.PartitionFunction) NoSuchSchemaException(com.linkedin.databus2.schemas.NoSuchSchemaException) OracleAvroGenericEventFactory(com.linkedin.databus2.producers.db.OracleAvroGenericEventFactory) EventFactory(com.linkedin.databus2.producers.db.EventFactory) InvalidConfigException(com.linkedin.databus.core.util.InvalidConfigException) EventSourceStatistics(com.linkedin.databus.monitoring.mbean.EventSourceStatistics) OracleTriggerMonitoredSourceInfo(com.linkedin.databus2.producers.db.OracleTriggerMonitoredSourceInfo)
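
The URI handling above (splitting an optional "schema.view" qualifier and stripping the "sy$" prefix) can be read in isolation. Below is a small standalone sketch of that logic with a usage example; the class name is an illustrative assumption, not part of the databus codebase.

// Illustrative sketch of the eventView parsing performed in buildOracleMonitoredSourceInfo.
final class EventViewParts {
    final String eventViewSchema;  // null when the source URI has no "schema." qualifier
    final String eventView;

    EventViewParts(String uri) {
        String schemaPart = null;
        String viewPart = uri;
        if (uri.indexOf('.') != -1) {
            String[] parts = uri.split("\\.");
            schemaPart = parts[0];
            viewPart = parts[1];
        }
        // Strip the "sy$" prefix, as the original code does, to get the base view name.
        if (viewPart.toLowerCase().startsWith("sy$")) {
            viewPart = viewPart.substring(3);
        }
        this.eventViewSchema = schemaPart;
        this.eventView = viewPart;
    }
}

For example, new EventViewParts("DBUSER.sy$MEMBER_PROFILE") yields eventViewSchema = "DBUSER" and eventView = "MEMBER_PROFILE", while new EventViewParts("MEMBER_PROFILE") leaves eventViewSchema null.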

Example 9 with SchemaRegistryService

Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.

From class TestRegisterRequestProcessor, method testRegisterReqProcessorVx().

// Test of happy path when the protocol version is specified as 2 or 3,
// or not specified at all.
// We should send out the source schemas only, and that too as a list.
private void testRegisterReqProcessorVx(final int protoVersion) throws Exception {
    LOG.info("Verifying happy path with protocol version: " + protoVersion);
    Properties params = new Properties();
    final int srcId1 = 101;
    final String srcName1 = "source-101";
    final String docSchema1 = "docSchema1";
    final String docSchema2 = "docSchema2";
    final short docSchemaV1 = 1;
    final short docSchemaV2 = 2;
    if (protoVersion != 0) {
        params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
    }
    params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
    final StringBuilder responseStr = new StringBuilder();
    ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
    // We should write out proto-version as 3 if none was specified in the input, otherwise match the proto version
    chunkedWritableByteChannel.addMetadata(EasyMock.eq(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR), protoVersion != 0 ? EasyMock.eq(protoVersion) : EasyMock.eq(3));
    EasyMock.expectLastCall().times(1);
    chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            Charset charset = Charset.forName("UTF-8");
            CharsetDecoder decoder = charset.newDecoder();
            responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
            return responseStr.length();
        }
    });
    EasyMock.replay(chunkedWritableByteChannel);
    DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
    EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
    EasyMock.expect(mockReq.getResponseContent()).andReturn(chunkedWritableByteChannel);
    EasyMock.replay(mockReq);
    LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
    SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
    EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
    EasyMock.replay(mockSrcIdReg);
    Map<Short, String> srcSchemaVersions = new HashMap<Short, String>();
    srcSchemaVersions.put(docSchemaV1, docSchema1);
    srcSchemaVersions.put(docSchemaV2, docSchema2);
    SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
    EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andReturn(srcSchemaVersions).anyTimes();
    EasyMock.replay(mockSchemaReg);
    HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
    EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
    EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
    EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
    EasyMock.replay(mockRelay);
    RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
    reqProcessor.process(mockReq);
    ObjectMapper mapper = new ObjectMapper();
    List<RegisterResponseEntry> schemasList = mapper.readValue(responseStr.toString(), new TypeReference<List<RegisterResponseEntry>>() {
    });
    Map<Long, List<RegisterResponseEntry>> sourcesSchemasMap = RegisterResponseEntry.convertSchemaListToMap(schemasList);
    // There should be 1 entry in the map.
    Assert.assertEquals(1, sourcesSchemasMap.size());
    Assert.assertEquals(2, sourcesSchemasMap.get(new Long(srcId1)).size());
    for (RegisterResponseEntry r : sourcesSchemasMap.get(new Long(srcId1))) {
        Assert.assertEquals(srcId1, r.getId());
        if (r.getVersion() == docSchemaV1) {
            Assert.assertEquals(docSchema1, r.getSchema());
        } else {
            Assert.assertEquals(docSchema2, r.getSchema());
        }
    }
    EasyMock.verify(mockRelay);
    EasyMock.verify(mockReq);
    EasyMock.verify(mockSchemaReg);
    EasyMock.verify(mockSrcIdReg);
}
Also used : ChunkedWritableByteChannel(com.linkedin.databus2.core.container.ChunkedWritableByteChannel) HashMap(java.util.HashMap) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) Properties(java.util.Properties) List(java.util.List) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) CharsetDecoder(java.nio.charset.CharsetDecoder) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) Charset(java.nio.charset.Charset) SourceIdNameRegistry(com.linkedin.databus2.schemas.SourceIdNameRegistry) ByteBuffer(java.nio.ByteBuffer) DatabusRequest(com.linkedin.databus2.core.container.request.DatabusRequest) RegisterRequestProcessor(com.linkedin.databus.container.request.RegisterRequestProcessor) RegisterResponseEntry(com.linkedin.databus2.core.container.request.RegisterResponseEntry)
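
Since testRegisterReqProcessorVx is a private helper parameterized on the protocol version, the enclosing test class presumably drives it along the lines sketched below; the concrete test-method names, and the use of a TestNG/JUnit-style @Test annotation, are assumptions.

// Hypothetical driver tests for the helper above (names and annotation framework assumed).
@Test
public void testRegisterV2() throws Exception {
    testRegisterReqProcessorVx(2);
}

@Test
public void testRegisterV3() throws Exception {
    testRegisterReqProcessorVx(3);
}

@Test
public void testRegisterNoVersionParam() throws Exception {
    // 0 means "do not set PROTOCOL_VERSION_PARAM"; the relay should then default to version 3.
    testRegisterReqProcessorVx(0);
}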

Example 10 with SchemaRegistryService

Use of com.linkedin.databus2.schemas.SchemaRegistryService in project databus by linkedin.

From class TestRegisterRequestProcessor, method testNullSchemasInGetSchemas().

private void testNullSchemasInGetSchemas(final int protoVersion) throws Exception {
    LOG.info("Testing null return from fetchAllSchemaVersionsBySourceName() with protoversion " + protoVersion);
    Properties params = new Properties();
    final int srcId1 = 101;
    final String srcName1 = "source-101";
    if (protoVersion != 0) {
        params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
    }
    params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
    final StringBuilder responseStr = new StringBuilder();
    ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
    // We should write out proto-version as 3 if none was specified in the input, otherwise match the proto version
    chunkedWritableByteChannel.addMetadata(EasyMock.eq(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR), protoVersion != 0 ? EasyMock.eq(protoVersion) : EasyMock.eq(3));
    EasyMock.expectLastCall().times(1);
    chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            Charset charset = Charset.forName("UTF-8");
            CharsetDecoder decoder = charset.newDecoder();
            responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
            return responseStr.length();
        }
    });
    EasyMock.replay(chunkedWritableByteChannel);
    DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
    EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
    EasyMock.expect(mockReq.getResponseContent()).andReturn(chunkedWritableByteChannel);
    EasyMock.replay(mockReq);
    LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
    SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
    EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
    EasyMock.replay(mockSrcIdReg);
    SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
    EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andReturn(null);
    EasyMock.replay(mockSchemaReg);
    HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
    EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
    EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
    EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
    EasyMock.replay(mockRelay);
    RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
    reqProcessor.process(mockReq);
    ObjectMapper mapper = new ObjectMapper();
    List<RegisterResponseEntry> schemasList = mapper.readValue(responseStr.toString(), new TypeReference<List<RegisterResponseEntry>>() {
    });
    Map<Long, List<RegisterResponseEntry>> sourcesSchemasMap = RegisterResponseEntry.convertSchemaListToMap(schemasList);
    // There should be no entries in the map, since the schema registry returned null.
    Assert.assertEquals(0, sourcesSchemasMap.size());
    EasyMock.verify(mockRelay);
    EasyMock.verify(mockReq);
    EasyMock.verify(mockSchemaReg);
    EasyMock.verify(mockSrcIdReg);
}
Also used : ChunkedWritableByteChannel(com.linkedin.databus2.core.container.ChunkedWritableByteChannel) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) Properties(java.util.Properties) List(java.util.List) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) CharsetDecoder(java.nio.charset.CharsetDecoder) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) Charset(java.nio.charset.Charset) SourceIdNameRegistry(com.linkedin.databus2.schemas.SourceIdNameRegistry) ByteBuffer(java.nio.ByteBuffer) DatabusRequest(com.linkedin.databus2.core.container.request.DatabusRequest) RegisterRequestProcessor(com.linkedin.databus.container.request.RegisterRequestProcessor) RegisterResponseEntry(com.linkedin.databus2.core.container.request.RegisterResponseEntry)
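
The two IAnswer blocks in the tests above are identical; if one wanted to de-duplicate them, a small helper along these lines would do. This is only a refactoring sketch, and the helper name is made up.

import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import org.easymock.EasyMock;
import org.easymock.IAnswer;

// Sketch: decode the ByteBuffer passed to write(...) as UTF-8 and append it to 'sink'.
static IAnswer<Object> appendUtf8ResponseTo(final StringBuilder sink) {
    return new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            ByteBuffer buf = (ByteBuffer) EasyMock.getCurrentArguments()[0];
            sink.append(Charset.forName("UTF-8").newDecoder().decode(buf));
            return sink.length();
        }
    };
}

Both tests could then use EasyMock.expectLastCall().andAnswer(appendUtf8ResponseTo(responseStr)); instead of the inline anonymous classes.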

Aggregations

SchemaRegistryService (com.linkedin.databus2.schemas.SchemaRegistryService) 11
DatabusException (com.linkedin.databus2.core.DatabusException) 9
InvalidConfigException (com.linkedin.databus.core.util.InvalidConfigException) 7
LogicalSource (com.linkedin.databus.core.data_model.LogicalSource) 5
ChunkedWritableByteChannel (com.linkedin.databus2.core.container.ChunkedWritableByteChannel) 5
RegisterResponseEntry (com.linkedin.databus2.core.container.request.RegisterResponseEntry) 5
OracleTriggerMonitoredSourceInfo (com.linkedin.databus2.producers.db.OracleTriggerMonitoredSourceInfo) 5
FileSystemSchemaRegistryService (com.linkedin.databus2.schemas.FileSystemSchemaRegistryService) 5
VersionedSchema (com.linkedin.databus2.schemas.VersionedSchema) 5
ArrayList (java.util.ArrayList) 5
HashMap (java.util.HashMap) 5
ObjectMapper (org.codehaus.jackson.map.ObjectMapper) 5
RegisterRequestProcessor (com.linkedin.databus.container.request.RegisterRequestProcessor) 4
DatabusRequest (com.linkedin.databus2.core.container.request.DatabusRequest) 4
NoSuchSchemaException (com.linkedin.databus2.schemas.NoSuchSchemaException) 4
SourceIdNameRegistry (com.linkedin.databus2.schemas.SourceIdNameRegistry) 4
VersionedSchemaSet (com.linkedin.databus2.schemas.VersionedSchemaSet) 4
IOException (java.io.IOException) 4
ByteBuffer (java.nio.ByteBuffer) 4
Charset (java.nio.charset.Charset) 4