
Example 1 with SourceIdNameRegistry

Use of com.linkedin.databus2.schemas.SourceIdNameRegistry in project databus by linkedin.

From class TestRegisterRequestProcessor, method testRegisterReqProcessorVx.

// Test of the happy path when the protocol version is specified as 2 or 3,
// or not specified at all.
// We should send out only the source schemas, serialized as a flat list.
private void testRegisterReqProcessorVx(final int protoVersion) throws Exception {
    LOG.info("Verifying happy path with protocol version: " + protoVersion);
    Properties params = new Properties();
    final int srcId1 = 101;
    final String srcName1 = "source-101";
    final String docSchema1 = "docSchema1";
    final String docSchema2 = "docSchema2";
    final short docSchemaV1 = 1;
    final short docSchemaV2 = 2;
    if (protoVersion != 0) {
        params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
    }
    params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
    final StringBuilder responseStr = new StringBuilder();
    ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
    // We should write out proto-version as 3 if none was specified in the input, otherwise match the proto version
    chunkedWritableByteChannel.addMetadata(EasyMock.eq(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR), protoVersion != 0 ? EasyMock.eq(protoVersion) : EasyMock.eq(3));
    EasyMock.expectLastCall().times(1);
    chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            Charset charset = Charset.forName("UTF-8");
            CharsetDecoder decoder = charset.newDecoder();
            responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
            return responseStr.length();
        }
    });
    EasyMock.replay(chunkedWritableByteChannel);
    DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
    EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
    EasyMock.expect(mockReq.getResponseContent()).andReturn(chunkedWritableByteChannel);
    EasyMock.replay(mockReq);
    LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
    SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
    EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
    EasyMock.replay(mockSrcIdReg);
    Map<Short, String> srcSchemaVersions = new HashMap<Short, String>();
    srcSchemaVersions.put(docSchemaV1, docSchema1);
    srcSchemaVersions.put(docSchemaV2, docSchema2);
    SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
    EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andReturn(srcSchemaVersions).anyTimes();
    EasyMock.replay(mockSchemaReg);
    HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
    EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
    EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
    EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
    EasyMock.replay(mockRelay);
    RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
    reqProcessor.process(mockReq);
    ObjectMapper mapper = new ObjectMapper();
    List<RegisterResponseEntry> schemasList = mapper.readValue(responseStr.toString(), new TypeReference<List<RegisterResponseEntry>>() {
    });
    Map<Long, List<RegisterResponseEntry>> sourcesSchemasMap = RegisterResponseEntry.convertSchemaListToMap(schemasList);
    // There should be 1 entry in the map.
    Assert.assertEquals(1, sourcesSchemasMap.size());
    Assert.assertEquals(2, sourcesSchemasMap.get(new Long(srcId1)).size());
    for (RegisterResponseEntry r : sourcesSchemasMap.get(new Long(srcId1))) {
        Assert.assertEquals(srcId1, r.getId());
        if (r.getVersion() == docSchemaV1) {
            Assert.assertEquals(docSchema1, r.getSchema());
        } else {
            Assert.assertEquals(docSchema2, r.getSchema());
        }
    }
    EasyMock.verify(mockRelay);
    EasyMock.verify(mockReq);
    EasyMock.verify(mockSchemaReg);
    EasyMock.verify(mockSrcIdReg);
}
Also used : ChunkedWritableByteChannel(com.linkedin.databus2.core.container.ChunkedWritableByteChannel) HashMap(java.util.HashMap) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) Properties(java.util.Properties) List(java.util.List) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) CharsetDecoder(java.nio.charset.CharsetDecoder) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) Charset(java.nio.charset.Charset) SourceIdNameRegistry(com.linkedin.databus2.schemas.SourceIdNameRegistry) ByteBuffer(java.nio.ByteBuffer) DatabusRequest(com.linkedin.databus2.core.container.request.DatabusRequest) RegisterRequestProcessor(com.linkedin.databus.container.request.RegisterRequestProcessor) RegisterResponseEntry(com.linkedin.databus2.core.container.request.RegisterResponseEntry)
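
A note on usage: the helper above is parameterized on protoVersion, so in the test class it would typically be driven by thin @Test wrappers, one per protocol case. The wrapper names below are hypothetical illustrations, not quoted from the project; only the private helper itself and the TestNG @Test annotation come from the snippets on this page.

@Test
public void testRegisterReqProcessorV2() throws Exception {
    // explicit protocol version 2
    testRegisterReqProcessorVx(2);
}

@Test
public void testRegisterReqProcessorV3() throws Exception {
    // explicit protocol version 3
    testRegisterReqProcessorVx(3);
}

@Test
public void testRegisterReqProcessorDefaultVersion() throws Exception {
    // 0 means "no version parameter"; the relay is expected to answer with version 3
    testRegisterReqProcessorVx(0);
}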

Example 2 with SourceIdNameRegistry

Use of com.linkedin.databus2.schemas.SourceIdNameRegistry in project databus by linkedin.

From class TestRegisterRequestProcessor, method testNullSchemasInGetSchemas.

private void testNullSchemasInGetSchemas(final int protoVersion) throws Exception {
    LOG.info("Testing null return from fetchAllSchemaVersionsBySourceName() with protoversion " + protoVersion);
    Properties params = new Properties();
    final int srcId1 = 101;
    final String srcName1 = "source-101";
    if (protoVersion != 0) {
        params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
    }
    params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
    final StringBuilder responseStr = new StringBuilder();
    ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
    // We should write out proto-version as 3 if none was specified in the input, otherwise match the proto version
    chunkedWritableByteChannel.addMetadata(EasyMock.eq(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR), protoVersion != 0 ? EasyMock.eq(protoVersion) : EasyMock.eq(3));
    EasyMock.expectLastCall().times(1);
    chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            Charset charset = Charset.forName("UTF-8");
            CharsetDecoder decoder = charset.newDecoder();
            responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
            return responseStr.length();
        }
    });
    EasyMock.replay(chunkedWritableByteChannel);
    DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
    EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
    EasyMock.expect(mockReq.getResponseContent()).andReturn(chunkedWritableByteChannel);
    EasyMock.replay(mockReq);
    LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
    SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
    EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
    EasyMock.replay(mockSrcIdReg);
    SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
    EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andReturn(null);
    EasyMock.replay(mockSchemaReg);
    HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
    EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
    EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
    EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
    EasyMock.replay(mockRelay);
    RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
    reqProcessor.process(mockReq);
    ObjectMapper mapper = new ObjectMapper();
    List<RegisterResponseEntry> schemasList = mapper.readValue(responseStr.toString(), new TypeReference<List<RegisterResponseEntry>>() {
    });
    Map<Long, List<RegisterResponseEntry>> sourcesSchemasMap = RegisterResponseEntry.convertSchemaListToMap(schemasList);
    // The map should be empty, since the schema registry returned null for the source's schemas.
    Assert.assertEquals(0, sourcesSchemasMap.size());
    EasyMock.verify(mockRelay);
    EasyMock.verify(mockReq);
    EasyMock.verify(mockSchemaReg);
    EasyMock.verify(mockSrcIdReg);
}
Also used : ChunkedWritableByteChannel(com.linkedin.databus2.core.container.ChunkedWritableByteChannel) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) Properties(java.util.Properties) List(java.util.List) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) CharsetDecoder(java.nio.charset.CharsetDecoder) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) Charset(java.nio.charset.Charset) SourceIdNameRegistry(com.linkedin.databus2.schemas.SourceIdNameRegistry) ByteBuffer(java.nio.ByteBuffer) DatabusRequest(com.linkedin.databus2.core.container.request.DatabusRequest) RegisterRequestProcessor(com.linkedin.databus.container.request.RegisterRequestProcessor) RegisterResponseEntry(com.linkedin.databus2.core.container.request.RegisterResponseEntry)
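
Both register tests lean on RegisterResponseEntry.convertSchemaListToMap to group the decoded list by source id: the null-schema case above ends with an empty map, while the happy path in Example 1 ends with one key mapped to two entries. The helper below is a hedged sketch of that grouping behavior as inferred from the assertions; it is not the actual databus implementation.

// Hypothetical equivalent of the grouping the assertions rely on: entries are keyed by
// source id, so an empty response list yields an empty map, and two schema versions of
// one source yield a single key whose list has two elements.
static Map<Long, List<RegisterResponseEntry>> groupBySourceId(List<RegisterResponseEntry> entries) {
    Map<Long, List<RegisterResponseEntry>> result = new HashMap<Long, List<RegisterResponseEntry>>();
    if (entries == null) {
        return result;
    }
    for (RegisterResponseEntry entry : entries) {
        Long id = Long.valueOf(entry.getId());
        List<RegisterResponseEntry> forSource = result.get(id);
        if (forSource == null) {
            forSource = new ArrayList<RegisterResponseEntry>();
            result.put(id, forSource);
        }
        forSource.add(entry);
    }
    return result;
}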

Example 3 with SourceIdNameRegistry

Use of com.linkedin.databus2.schemas.SourceIdNameRegistry in project databus by linkedin.

From class ReadEventsRequestProcessor, method process.

@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException, DatabusException {
    boolean isDebug = LOG.isDebugEnabled();
    try {
        ObjectMapper objMapper = new ObjectMapper();
        String checkpointString = request.getParams().getProperty(CHECKPOINT_PARAM, null);
        String checkpointStringMult = request.getParams().getProperty(CHECKPOINT_PARAM_MULT, null);
        int fetchSize = request.getRequiredIntParam(FETCH_SIZE_PARAM);
        String formatStr = request.getRequiredStringParam(OUTPUT_FORMAT_PARAM);
        Encoding enc = Encoding.valueOf(formatStr.toUpperCase());
        String sourcesListStr = request.getParams().getProperty(SOURCES_PARAM, null);
        String subsStr = request.getParams().getProperty(SUBS_PARAM, null);
        String partitionInfoStr = request.getParams().getProperty(PARTITION_INFO_STRING);
        String streamFromLatestSCNStr = request.getParams().getProperty(STREAM_FROM_LATEST_SCN);
        String clientMaxEventVersionStr = request.getParams().getProperty(DatabusHttpHeaders.MAX_EVENT_VERSION);
        int clientEventVersion = (clientMaxEventVersionStr != null) ? Integer.parseInt(clientMaxEventVersionStr) : DbusEventFactory.DBUS_EVENT_V1;
        if (clientEventVersion < 0 || clientEventVersion == 1 || clientEventVersion > DbusEventFactory.DBUS_EVENT_V2) {
            throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.MAX_EVENT_VERSION, clientMaxEventVersionStr);
        }
        if (null == sourcesListStr && null == subsStr) {
            throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM + "|" + SUBS_PARAM, "null");
        }
        //TODO for now we separate the code paths to limit the impact on existing Databus 2 deployments (DDSDBUS-79)
        //We have to get rid of this eventually and have a single data path.
        boolean v2Mode = null == subsStr;
        DbusKeyCompositeFilter keyCompositeFilter = null;
        if (null != partitionInfoStr) {
            try {
                Map<Long, DbusKeyFilter> fMap = KeyFilterConfigJSONFactory.parseSrcIdFilterConfigMap(partitionInfoStr);
                keyCompositeFilter = new DbusKeyCompositeFilter();
                keyCompositeFilter.setFilterMap(fMap);
                if (isDebug)
                    LOG.debug("keyCompositeFilter is :" + keyCompositeFilter);
            } catch (Exception ex) {
                String msg = "Got exception while parsing partition Configs. PartitionInfo is:" + partitionInfoStr;
                LOG.error(msg, ex);
                throw new InvalidRequestParamValueException(COMMAND_NAME, PARTITION_INFO_STRING, partitionInfoStr);
            }
        }
        boolean streamFromLatestSCN = false;
        if (null != streamFromLatestSCNStr) {
            streamFromLatestSCN = Boolean.valueOf(streamFromLatestSCNStr);
        }
        long start = System.currentTimeMillis();
        List<DatabusSubscription> subs = null;
        //parse source ids
        SourceIdNameRegistry srcRegistry = _relay.getSourcesIdNameRegistry();
        HashSet<Integer> sourceIds = new HashSet<Integer>();
        if (null != sourcesListStr) {
            String[] sourcesList = sourcesListStr.split(",");
            for (String sourceId : sourcesList) {
                try {
                    Integer srcId = Integer.valueOf(sourceId);
                    sourceIds.add(srcId);
                } catch (NumberFormatException nfe) {
                    HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
                    if (null != globalHttpStatsCollector) {
                        globalHttpStatsCollector.registerInvalidStreamRequest();
                    }
                    throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
                }
            }
        }
        //process explicit subscriptions and generate respective logical partition filters
        NavigableSet<PhysicalPartitionKey> ppartKeys = null;
        if (null != subsStr) {
            List<DatabusSubscription.Builder> subsBuilder = null;
            subsBuilder = objMapper.readValue(subsStr, new TypeReference<List<DatabusSubscription.Builder>>() {
            });
            subs = new ArrayList<DatabusSubscription>(subsBuilder.size());
            for (DatabusSubscription.Builder subBuilder : subsBuilder) {
                subs.add(subBuilder.build());
            }
            ppartKeys = new TreeSet<PhysicalPartitionKey>();
            for (DatabusSubscription sub : subs) {
                PhysicalPartition ppart = sub.getPhysicalPartition();
                if (ppart.isAnyPartitionWildcard()) {
                    ppartKeys = _eventBuffer.getAllPhysicalPartitionKeys();
                    break;
                } else {
                    ppartKeys.add(new PhysicalPartitionKey(ppart));
                }
            }
        }
        // Need to make sure that we don't have tests that send requests in this form.
        if (subs != null && checkpointStringMult == null && checkpointString != null) {
            throw new RequestProcessingException("Both Subscriptions and CheckpointMult should be present");
        }
        //convert source ids into subscriptions
        if (null == subs)
            subs = new ArrayList<DatabusSubscription>();
        for (Integer srcId : sourceIds) {
            LogicalSource lsource = srcRegistry.getSource(srcId);
            if (lsource == null)
                throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, srcId.toString());
            if (isDebug)
                LOG.debug("registry returns " + lsource + " for srcid=" + srcId);
            DatabusSubscription newSub = DatabusSubscription.createSimpleSourceSubscription(lsource);
            subs.add(newSub);
        }
        DbusFilter ppartFilters = null;
        if (subs.size() > 0) {
            try {
                ppartFilters = _eventBuffer.constructFilters(subs);
            } catch (DatabusException de) {
                throw new RequestProcessingException("unable to generate physical partitions filters:" + de.getMessage(), de);
            }
        }
        ConjunctionDbusFilter filters = new ConjunctionDbusFilter();
        // Source filter comes first
        if (v2Mode)
            filters.addFilter(new SourceDbusFilter(sourceIds));
        else if (null != ppartFilters)
            filters.addFilter(ppartFilters);
        /*
      // Key range filter comes next
      if ((keyMin >0) && (keyMax > 0))
      {
        filters.addFilter(new KeyRangeFilter(keyMin, keyMax));
      }
      */
        if (null != keyCompositeFilter) {
            filters.addFilter(keyCompositeFilter);
        }
        // need to update registerStreamRequest to support Mult checkpoint TODO (DDSDBUS-80)
        // temp solution, 3 options:
        // 1. checkpointStringMult not null - generate the CheckpointMult from it
        // 2. checkpointStringMult null, checkpointString not null - create an empty CheckpointMult,
        //    create a Checkpoint(checkpointString), and add it to cpMult for each physical partition
        // 3. both are null - create an empty CheckpointMult and add an empty Checkpoint to it for each physical partition
        PhysicalPartition pPartition;
        Checkpoint cp = null;
        CheckpointMult cpMult = null;
        if (checkpointStringMult != null) {
            try {
                cpMult = new CheckpointMult(checkpointStringMult);
            } catch (InvalidParameterSpecException e) {
                LOG.error("Invalid CheckpointMult:" + checkpointStringMult, e);
                throw new InvalidRequestParamValueException("stream", "CheckpointMult", checkpointStringMult);
            }
        } else {
            // there is no checkpoint - create an empty one
            cpMult = new CheckpointMult();
            Iterator<Integer> it = sourceIds.iterator();
            while (it.hasNext()) {
                Integer srcId = it.next();
                pPartition = _eventBuffer.getPhysicalPartition(srcId);
                if (pPartition == null)
                    throw new RequestProcessingException("unable to find physical partitions for source:" + srcId);
                if (checkpointString != null) {
                    cp = new Checkpoint(checkpointString);
                } else {
                    cp = new Checkpoint();
                    cp.setFlexible();
                }
                cpMult.addCheckpoint(pPartition, cp);
            }
        }
        if (isDebug)
            LOG.debug("checkpointStringMult = " + checkpointStringMult + ";singlecheckpointString=" + checkpointString + ";CPM=" + cpMult);
        // If the checkpoint does not carry a cursor partition, fall back to the one tracked as part of the server context (on the request).
        if (cpMult.getCursorPartition() == null) {
            cpMult.setCursorPartition(request.getCursorPartition());
        }
        if (isDebug) {
            if (cpMult.getCursorPartition() != null) {
                LOG.debug("Using physical paritition cursor " + cpMult.getCursorPartition());
            }
        }
        // for registerStreamRequest we need a single Checkpoint (TODO - fix it) (DDSDBUS-81)
        if (cp == null) {
            Iterator<Integer> it = sourceIds.iterator();
            if (it.hasNext()) {
                Integer srcId = it.next();
                pPartition = _eventBuffer.getPhysicalPartition(srcId);
                cp = cpMult.getCheckpoint(pPartition);
            } else {
                cp = new Checkpoint();
                cp.setFlexible();
            }
        }
        if (null != checkpointString && isDebug)
            LOG.debug("About to stream from cp: " + checkpointString.toString());
        HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
        HttpStatisticsCollector connHttpStatsCollector = null;
        if (null != globalHttpStatsCollector) {
            connHttpStatsCollector = (HttpStatisticsCollector) request.getParams().get(globalHttpStatsCollector.getName());
        }
        if (null != globalHttpStatsCollector)
            globalHttpStatsCollector.registerStreamRequest(cp, sourceIds);
        StatsCollectors<DbusEventsStatisticsCollector> statsCollectors = _relay.getOutBoundStatsCollectors();
        try {
            DbusEventBufferBatchReadable bufRead = v2Mode ? _eventBuffer.getDbusEventBufferBatchReadable(sourceIds, cpMult, statsCollectors) : _eventBuffer.getDbusEventBufferBatchReadable(cpMult, ppartKeys, statsCollectors);
            int eventsRead = 0;
            int minPendingEventSize = 0;
            StreamEventsResult result = null;
            bufRead.setClientMaxEventVersion(clientEventVersion);
            if (v2Mode) {
                result = bufRead.streamEvents(streamFromLatestSCN, fetchSize, request.getResponseContent(), enc, filters);
                eventsRead = result.getNumEventsStreamed();
                minPendingEventSize = result.getSizeOfPendingEvent();
                if (isDebug) {
                    LOG.debug("Process: streamed " + eventsRead + " from sources " + Arrays.toString(sourceIds.toArray()));
                    //can be used for debugging to stream from a cp
                    LOG.debug("CP=" + cpMult);
                }
            //if (null != statsCollectors) statsCollectors.mergeStatsCollectors();
            } else {
                result = bufRead.streamEvents(streamFromLatestSCN, fetchSize, request.getResponseContent(), enc, filters);
                eventsRead = result.getNumEventsStreamed();
                minPendingEventSize = result.getSizeOfPendingEvent();
                if (isDebug)
                    LOG.debug("Process: streamed " + eventsRead + " with subscriptions " + subs);
                cpMult = bufRead.getCheckpointMult();
                if (cpMult != null) {
                    request.setCursorPartition(cpMult.getCursorPartition());
                }
            }
            if (eventsRead == 0 && minPendingEventSize > 0) {
                // Append a header to indicate to the client that we do have at least one event to
                // send, but it is too large to fit into client's offered buffer.
                request.getResponseContent().addMetadata(DatabusHttpHeaders.DATABUS_PENDING_EVENT_SIZE, minPendingEventSize);
                LOG.debug("Returning 0 events but have pending event of size " + minPendingEventSize);
            }
        } catch (ScnNotFoundException snfe) {
            if (null != globalHttpStatsCollector) {
                globalHttpStatsCollector.registerScnNotFoundStreamResponse();
            }
            throw new RequestProcessingException(snfe);
        } catch (OffsetNotFoundException snfe) {
            LOG.error("OffsetNotFound", snfe);
            if (null != globalHttpStatsCollector) {
                globalHttpStatsCollector.registerScnNotFoundStreamResponse();
            }
            throw new RequestProcessingException(snfe);
        }
        if (null != connHttpStatsCollector) {
            connHttpStatsCollector.registerStreamResponse(System.currentTimeMillis() - start);
            globalHttpStatsCollector.merge(connHttpStatsCollector);
            connHttpStatsCollector.reset();
        } else if (null != globalHttpStatsCollector) {
            globalHttpStatsCollector.registerStreamResponse(System.currentTimeMillis() - start);
        }
    } catch (InvalidRequestParamValueException e) {
        HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
        if (null != globalHttpStatsCollector) {
            globalHttpStatsCollector.registerInvalidStreamRequest();
        }
        throw e;
    }
    return request;
}
Also used : CheckpointMult(com.linkedin.databus.core.CheckpointMult) SourceDbusFilter(com.linkedin.databus2.core.filter.SourceDbusFilter) ArrayList(java.util.ArrayList) DbusEventsStatisticsCollector(com.linkedin.databus.core.monitoring.mbean.DbusEventsStatisticsCollector) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) HttpStatisticsCollector(com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector) ScnNotFoundException(com.linkedin.databus.core.ScnNotFoundException) RequestProcessingException(com.linkedin.databus2.core.container.request.RequestProcessingException) TypeReference(org.codehaus.jackson.type.TypeReference) InvalidParameterSpecException(java.security.spec.InvalidParameterSpecException) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) PhysicalPartition(com.linkedin.databus.core.data_model.PhysicalPartition) HashSet(java.util.HashSet) StreamEventsResult(com.linkedin.databus.core.StreamEventsResult) Encoding(com.linkedin.databus.core.Encoding) SourceIdNameRegistry(com.linkedin.databus2.schemas.SourceIdNameRegistry) DatabusSubscription(com.linkedin.databus.core.data_model.DatabusSubscription) ConjunctionDbusFilter(com.linkedin.databus2.core.filter.ConjunctionDbusFilter) SourceDbusFilter(com.linkedin.databus2.core.filter.SourceDbusFilter) DbusFilter(com.linkedin.databus2.core.filter.DbusFilter) InvalidRequestParamValueException(com.linkedin.databus2.core.container.request.InvalidRequestParamValueException) Checkpoint(com.linkedin.databus.core.Checkpoint) DbusKeyFilter(com.linkedin.databus2.core.filter.DbusKeyFilter) InvalidRequestParamValueException(com.linkedin.databus2.core.container.request.InvalidRequestParamValueException) ScnNotFoundException(com.linkedin.databus.core.ScnNotFoundException) InvalidParameterSpecException(java.security.spec.InvalidParameterSpecException) OffsetNotFoundException(com.linkedin.databus.core.OffsetNotFoundException) DatabusException(com.linkedin.databus2.core.DatabusException) IOException(java.io.IOException) RequestProcessingException(com.linkedin.databus2.core.container.request.RequestProcessingException) Checkpoint(com.linkedin.databus.core.Checkpoint) DbusEventBufferBatchReadable(com.linkedin.databus.core.DbusEventBufferBatchReadable) PhysicalPartitionKey(com.linkedin.databus.core.DbusEventBufferMult.PhysicalPartitionKey) DatabusException(com.linkedin.databus2.core.DatabusException) OffsetNotFoundException(com.linkedin.databus.core.OffsetNotFoundException) ConjunctionDbusFilter(com.linkedin.databus2.core.filter.ConjunctionDbusFilter) DbusKeyCompositeFilter(com.linkedin.databus2.core.filter.DbusKeyCompositeFilter)
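
The checkpoint handling in the middle of process() implements the three cases listed in the inline comment. As a readability aid, here is a hedged condensation of just that decision into a standalone helper. The _eventBuffer field is assumed here to be a DbusEventBufferMult (suggested by the PhysicalPartitionKey import), and the thrown exception types mirror the ones used in the original method; this sketch is not part of the quoted class.

// Hypothetical condensation of cases 1-3 from the comment above. Case 1: parse the
// supplied multi-partition checkpoint. Cases 2 and 3: build one Checkpoint per source's
// physical partition, either from the single checkpoint string or as an empty,
// "flexible" checkpoint.
static CheckpointMult resolveCheckpointMult(String checkpointStringMult,
                                            String checkpointString,
                                            Set<Integer> sourceIds,
                                            DbusEventBufferMult eventBuffer)
    throws IOException, InvalidParameterSpecException, RequestProcessingException {
    if (checkpointStringMult != null) {
        return new CheckpointMult(checkpointStringMult);
    }
    CheckpointMult cpMult = new CheckpointMult();
    for (Integer srcId : sourceIds) {
        PhysicalPartition pPartition = eventBuffer.getPhysicalPartition(srcId);
        if (pPartition == null) {
            throw new RequestProcessingException("unable to find physical partitions for source:" + srcId);
        }
        Checkpoint cp;
        if (checkpointString != null) {
            cp = new Checkpoint(checkpointString);
        } else {
            cp = new Checkpoint();
            cp.setFlexible();
        }
        cpMult.addCheckpoint(pPartition, cp);
    }
    return cpMult;
}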

Example 4 with SourceIdNameRegistry

Use of com.linkedin.databus2.schemas.SourceIdNameRegistry in project databus by linkedin.

From class TestRegisterRequestProcessor, method testDatabusExceptionInGetSchemas.

private void testDatabusExceptionInGetSchemas(final int protoVersion) throws Exception {
    LOG.info("Testing DatabusException in getSchemas() call with protocol version " + protoVersion);
    Properties params = new Properties();
    final int srcId1 = 101;
    final String srcName1 = "source-101";
    final String docSchema1 = "docSchema1";
    final String docSchema2 = "docSchema2";
    final short docSchemaV1 = 1;
    final short docSchemaV2 = 2;
    if (protoVersion != 0) {
        params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
    }
    params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
    final StringBuilder responseStr = new StringBuilder();
    ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
    chunkedWritableByteChannel.addMetadata(EasyMock.anyObject(String.class), EasyMock.anyInt());
    EasyMock.expectLastCall().times(1);
    chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            Charset charset = Charset.forName("UTF-8");
            CharsetDecoder decoder = charset.newDecoder();
            responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
            return responseStr.length();
        }
    });
    EasyMock.replay(chunkedWritableByteChannel);
    DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
    EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
    EasyMock.replay(mockReq);
    LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
    SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
    EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
    EasyMock.replay(mockSrcIdReg);
    Map<Short, String> srcSchemaVersions = new HashMap<Short, String>();
    srcSchemaVersions.put(docSchemaV1, docSchema1);
    srcSchemaVersions.put(docSchemaV2, docSchema2);
    DatabusException expectedCause = new DatabusException("FakeException");
    SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
    EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andThrow(expectedCause);
    EasyMock.replay(mockSchemaReg);
    HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
    EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
    EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
    EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
    EasyMock.replay(mockRelay);
    RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
    boolean exceptionCaught = false;
    try {
        reqProcessor.process(mockReq);
    } catch (RequestProcessingException e) {
        Assert.assertEquals(expectedCause, e.getCause());
        exceptionCaught = true;
    }
    // Fail if process() completed without throwing; otherwise the assertion above never runs.
    Assert.assertTrue(exceptionCaught);
    EasyMock.verify(mockRelay);
    EasyMock.verify(mockReq);
    EasyMock.verify(mockSchemaReg);
    EasyMock.verify(mockSrcIdReg);
}
Also used : ChunkedWritableByteChannel(com.linkedin.databus2.core.container.ChunkedWritableByteChannel) HashMap(java.util.HashMap) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) Properties(java.util.Properties) RequestProcessingException(com.linkedin.databus2.core.container.request.RequestProcessingException) CharsetDecoder(java.nio.charset.CharsetDecoder) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) Charset(java.nio.charset.Charset) SourceIdNameRegistry(com.linkedin.databus2.schemas.SourceIdNameRegistry) ByteBuffer(java.nio.ByteBuffer) DatabusRequest(com.linkedin.databus2.core.container.request.DatabusRequest) DatabusException(com.linkedin.databus2.core.DatabusException) RegisterRequestProcessor(com.linkedin.databus.container.request.RegisterRequestProcessor)
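
The only substantive assertion in this test is on the exception's cause, so what it pins down is the error-propagation contract: a DatabusException raised by the schema registry must surface as the cause of the RequestProcessingException thrown by process(). Below is a hedged sketch of that contract, not the quoted RegisterRequestProcessor implementation; schemaReg and srcName are hypothetical stand-ins for the processor's schema registry handle and the resolved source name.

// Sketch of the wrapping behavior the test verifies: the schema-registry failure is
// wrapped rather than swallowed, so Assert.assertEquals(expectedCause, e.getCause())
// in the test above can see the original DatabusException.
private Map<Short, String> fetchSchemasOrWrap(SchemaRegistryService schemaReg, String srcName)
    throws RequestProcessingException {
    try {
        return schemaReg.fetchAllSchemaVersionsBySourceName(srcName);
    } catch (DatabusException e) {
        throw new RequestProcessingException(e);
    }
}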

Example 5 with SourceIdNameRegistry

Use of com.linkedin.databus2.schemas.SourceIdNameRegistry in project databus by linkedin.

From class TestRegisterRequestProcessor, method testV4RegisterRequestProcessor.

// Test the happy path where there are 2 versions of document schema of a table and one version of metadata
// schema in the registry.
@Test
public void testV4RegisterRequestProcessor() throws Exception {
    Properties params = new Properties();
    final int protoVersion = 4;
    final int srcId1 = 101;
    final String srcName1 = "source-101";
    final String docSchema1 = "docSchema1";
    final String docSchema2 = "docSchema2";
    final String metadataSchema1 = makeMetadataSchema(1);
    final String metadataSchema2 = makeMetadataSchema(2);
    final byte[] metaSchemaDigest1 = new byte[] { 32, 33, 34, 35 };
    final byte[] metaSchemaDigest2 = new byte[] { 35, 34, 33, 32 };
    final short docSchemaV1 = 1;
    final short docSchemaV2 = 2;
    final short metaSchemaV1 = 1;
    final short metaSchemaV2 = 2;
    params.setProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, Integer.toString(protoVersion));
    params.setProperty(RegisterRequestProcessor.SOURCES_PARAM, Integer.toString(srcId1));
    final StringBuilder responseStr = new StringBuilder();
    ChunkedWritableByteChannel chunkedWritableByteChannel = EasyMock.createMock(ChunkedWritableByteChannel.class);
    chunkedWritableByteChannel.addMetadata(EasyMock.eq(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR), EasyMock.eq(protoVersion));
    chunkedWritableByteChannel.write(EasyMock.anyObject(ByteBuffer.class));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Object>() {

        @Override
        public Object answer() throws Throwable {
            Charset charset = Charset.forName("UTF-8");
            CharsetDecoder decoder = charset.newDecoder();
            responseStr.append(decoder.decode((ByteBuffer) EasyMock.getCurrentArguments()[0]));
            return responseStr.length();
        }
    });
    EasyMock.replay(chunkedWritableByteChannel);
    DatabusRequest mockReq = EasyMock.createMock(DatabusRequest.class);
    EasyMock.expect(mockReq.getParams()).andReturn(params).anyTimes();
    EasyMock.expect(mockReq.getResponseContent()).andReturn(chunkedWritableByteChannel);
    EasyMock.replay(mockReq);
    LogicalSource lsrc1 = new LogicalSource(srcId1, srcName1);
    SourceIdNameRegistry mockSrcIdReg = EasyMock.createMock(SourceIdNameRegistry.class);
    EasyMock.expect(mockSrcIdReg.getSource(srcId1)).andReturn(lsrc1).anyTimes();
    EasyMock.replay(mockSrcIdReg);
    Map<Short, String> srcSchemaVersions = new HashMap<Short, String>();
    srcSchemaVersions.put(docSchemaV1, docSchema1);
    srcSchemaVersions.put(docSchemaV2, docSchema2);
    VersionedSchemaSet metadataSchemaSet = new VersionedSchemaSet();
    metadataSchemaSet.add(SchemaRegistryService.DEFAULT_METADATA_SCHEMA_SOURCE, metaSchemaV1, new SchemaId(metaSchemaDigest1), metadataSchema1, true);
    metadataSchemaSet.add(SchemaRegistryService.DEFAULT_METADATA_SCHEMA_SOURCE, metaSchemaV2, new SchemaId(metaSchemaDigest2), metadataSchema2, true);
    SchemaRegistryService mockSchemaReg = EasyMock.createMock(SchemaRegistryService.class);
    EasyMock.expect(mockSchemaReg.fetchAllSchemaVersionsBySourceName(srcName1)).andReturn(srcSchemaVersions).anyTimes();
    EasyMock.expect(mockSchemaReg.fetchAllMetadataSchemaVersions()).andReturn(metadataSchemaSet).anyTimes();
    EasyMock.replay(mockSchemaReg);
    HttpRelay mockRelay = EasyMock.createMock(HttpRelay.class);
    EasyMock.expect(mockRelay.getHttpStatisticsCollector()).andReturn(null).anyTimes();
    EasyMock.expect(mockRelay.getSourcesIdNameRegistry()).andReturn(mockSrcIdReg).anyTimes();
    EasyMock.expect(mockRelay.getSchemaRegistryService()).andReturn(mockSchemaReg).anyTimes();
    EasyMock.replay(mockRelay);
    RegisterRequestProcessor reqProcessor = new RegisterRequestProcessor(null, mockRelay);
    reqProcessor.process(mockReq);
    // Decode
    ObjectMapper mapper = new ObjectMapper();
    HashMap<String, List<Object>> responseMap = mapper.readValue(responseStr.toString(), new TypeReference<HashMap<String, List<Object>>>() {
    });
    Map<Long, List<RegisterResponseEntry>> sourcesSchemasMap = RegisterResponseEntry.createFromResponse(responseMap, RegisterResponseEntry.SOURCE_SCHEMAS_KEY, false);
    // There should be one entry in the map, which is a list.
    Assert.assertEquals(1, sourcesSchemasMap.size());
    Assert.assertEquals(2, sourcesSchemasMap.get(new Long(srcId1)).size());
    for (RegisterResponseEntry r : sourcesSchemasMap.get(new Long(srcId1))) {
        Assert.assertEquals(srcId1, r.getId());
        if (r.getVersion() == docSchemaV1) {
            Assert.assertEquals(docSchema1, r.getSchema());
        } else {
            Assert.assertEquals(docSchema2, r.getSchema());
        }
    }
    Map<Long, List<RegisterResponseEntry>> keysSchemasMap = RegisterResponseEntry.createFromResponse(responseMap, RegisterResponseEntry.KEY_SCHEMAS_KEY, true);
    Assert.assertNull(keysSchemasMap);
    List<RegisterResponseMetadataEntry> metadataSchemasList = RegisterResponseMetadataEntry.createFromResponse(responseMap, RegisterResponseMetadataEntry.METADATA_SCHEMAS_KEY, true);
    // The response should contain the exact string that the schema registry has.
    Assert.assertEquals(2, metadataSchemasList.size());
    for (RegisterResponseMetadataEntry r : metadataSchemasList) {
        if (r.getVersion() == 1) {
            Assert.assertEquals(metadataSchema1, r.getSchema());
            Assert.assertTrue(Arrays.equals(metaSchemaDigest1, r.getCrc32()));
        } else {
            Assert.assertEquals(metadataSchema2, r.getSchema());
            Assert.assertTrue(Arrays.equals(metaSchemaDigest2, r.getCrc32()));
        }
    }
    EasyMock.verify(mockRelay);
    EasyMock.verify(mockReq);
    EasyMock.verify(mockSchemaReg);
    EasyMock.verify(mockSrcIdReg);
}
Also used : ChunkedWritableByteChannel(com.linkedin.databus2.core.container.ChunkedWritableByteChannel) HashMap(java.util.HashMap) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) Properties(java.util.Properties) VersionedSchemaSet(com.linkedin.databus2.schemas.VersionedSchemaSet) List(java.util.List) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) CharsetDecoder(java.nio.charset.CharsetDecoder) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) Charset(java.nio.charset.Charset) SourceIdNameRegistry(com.linkedin.databus2.schemas.SourceIdNameRegistry) ByteBuffer(java.nio.ByteBuffer) DatabusRequest(com.linkedin.databus2.core.container.request.DatabusRequest) RegisterResponseMetadataEntry(com.linkedin.databus2.core.container.request.RegisterResponseMetadataEntry) RegisterRequestProcessor(com.linkedin.databus.container.request.RegisterRequestProcessor) SchemaId(com.linkedin.databus2.schemas.SchemaId) RegisterResponseEntry(com.linkedin.databus2.core.container.request.RegisterResponseEntry) Test(org.testng.annotations.Test)
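
For the v4 protocol the test no longer decodes a flat list; it decodes a map keyed by schema category and hands that map to the createFromResponse helpers. The fragment below is a hedged illustration of the top-level keys that decoding implies: the constant names come from the snippet above, while their string values and the absence of key schemas are inferences from the assertions, not quoted facts.

// Hypothetical extra check (not in the project): the two categories proven non-null by
// createFromResponse must be present as top-level keys in the decoded v4 response.
// keysSchemasMap being null suggests RegisterResponseEntry.KEY_SCHEMAS_KEY is simply
// absent when no key schemas are registered (an inference, not a documented guarantee).
private static void checkV4TopLevelKeys(Map<String, List<Object>> responseMap) {
    Assert.assertTrue(responseMap.containsKey(RegisterResponseEntry.SOURCE_SCHEMAS_KEY));
    Assert.assertTrue(responseMap.containsKey(RegisterResponseMetadataEntry.METADATA_SCHEMAS_KEY));
}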

Aggregations

LogicalSource (com.linkedin.databus.core.data_model.LogicalSource): 5 usages
SourceIdNameRegistry (com.linkedin.databus2.schemas.SourceIdNameRegistry): 5 usages
RegisterRequestProcessor (com.linkedin.databus.container.request.RegisterRequestProcessor): 4 usages
ChunkedWritableByteChannel (com.linkedin.databus2.core.container.ChunkedWritableByteChannel): 4 usages
DatabusRequest (com.linkedin.databus2.core.container.request.DatabusRequest): 4 usages
SchemaRegistryService (com.linkedin.databus2.schemas.SchemaRegistryService): 4 usages
ByteBuffer (java.nio.ByteBuffer): 4 usages
Charset (java.nio.charset.Charset): 4 usages
CharsetDecoder (java.nio.charset.CharsetDecoder): 4 usages
Properties (java.util.Properties): 4 usages
ObjectMapper (org.codehaus.jackson.map.ObjectMapper): 4 usages
RegisterResponseEntry (com.linkedin.databus2.core.container.request.RegisterResponseEntry): 3 usages
HashMap (java.util.HashMap): 3 usages
DatabusException (com.linkedin.databus2.core.DatabusException): 2 usages
RequestProcessingException (com.linkedin.databus2.core.container.request.RequestProcessingException): 2 usages
List (java.util.List): 2 usages
Checkpoint (com.linkedin.databus.core.Checkpoint): 1 usage
CheckpointMult (com.linkedin.databus.core.CheckpointMult): 1 usage
DbusEventBufferBatchReadable (com.linkedin.databus.core.DbusEventBufferBatchReadable): 1 usage
PhysicalPartitionKey (com.linkedin.databus.core.DbusEventBufferMult.PhysicalPartitionKey): 1 usage