Example 1 with DbusFilter

Use of com.linkedin.databus2.core.filter.DbusFilter in the project databus by LinkedIn.

From the class BootstrapProcessor, the method getFilterSQL:

private String getFilterSQL() {
    // No filter is defined.
    if (keyFilter == null)
        return EMPTY_STRING;
    ArrayList<DbusFilter> filters = keyFilter.getFilters();
    ArrayList<String> filterStrings = new ArrayList<String>(filters.size());
    for (int i = 0; i < filters.size(); i++) {
        String filterStringTemp = FilterToSQL.convertToSQL(filters.get(i));
        // Identity comparison: assumes convertToSQL returns the shared EMPTY_STRING constant when a filter yields no SQL.
        if (filterStringTemp != EMPTY_STRING)
            filterStrings.add(filterStringTemp);
    }
    //check for 'none' partitions - do we have any filters to apply?
    if (filterStrings.size() == 0)
        return EMPTY_STRING;
    //build the filter string
    StringBuilder filterSqlBuilder = new StringBuilder();
    filterSqlBuilder.append(" ( ");
    for (int i = 0; i < filterStrings.size(); i++) {
        filterSqlBuilder.append(filterStrings.get(i));
        if (i != filterStrings.size() - 1)
            filterSqlBuilder.append(" OR ");
    }
    filterSqlBuilder.append(" ) ");
    return filterSqlBuilder.toString();
}
Also used : ArrayList(java.util.ArrayList) DbusFilter(com.linkedin.databus2.core.filter.DbusFilter) Checkpoint(com.linkedin.databus.core.Checkpoint)
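
getFilterSQL simply OR-joins the SQL fragment produced for each DbusFilter and wraps the result in parentheses, returning the empty string when no filter contributes anything. A minimal standalone sketch of that joining logic (a hypothetical helper, not the databus FilterToSQL API) behaves the same way:

import java.util.ArrayList;
import java.util.List;

public class FilterSqlJoiner {
    private static final String EMPTY_STRING = "";

    // OR-joins the non-empty per-filter SQL fragments, mirroring getFilterSQL above.
    public static String join(List<String> filterSqlFragments) {
        List<String> nonEmpty = new ArrayList<String>();
        for (String fragment : filterSqlFragments) {
            if (fragment != null && !fragment.isEmpty())
                nonEmpty.add(fragment);
        }
        if (nonEmpty.isEmpty())
            return EMPTY_STRING;   // nothing to filter on
        StringBuilder sql = new StringBuilder(" ( ");
        for (int i = 0; i < nonEmpty.size(); i++) {
            sql.append(nonEmpty.get(i));
            if (i != nonEmpty.size() - 1)
                sql.append(" OR ");
        }
        return sql.append(" ) ").toString();
    }
}

For example, join(Arrays.asList("key >= 0 AND key < 100", "MOD(key, 2) = 1")) would return " ( key >= 0 AND key < 100 OR MOD(key, 2) = 1 ) "; the fragments here are made up for illustration.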

Example 2 with DbusFilter

Use of com.linkedin.databus2.core.filter.DbusFilter in the project databus by LinkedIn.

From the class ReadEventsRequestProcessor, the method process:

@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException, DatabusException {
    boolean isDebug = LOG.isDebugEnabled();
    try {
        ObjectMapper objMapper = new ObjectMapper();
        String checkpointString = request.getParams().getProperty(CHECKPOINT_PARAM, null);
        String checkpointStringMult = request.getParams().getProperty(CHECKPOINT_PARAM_MULT, null);
        int fetchSize = request.getRequiredIntParam(FETCH_SIZE_PARAM);
        String formatStr = request.getRequiredStringParam(OUTPUT_FORMAT_PARAM);
        Encoding enc = Encoding.valueOf(formatStr.toUpperCase());
        String sourcesListStr = request.getParams().getProperty(SOURCES_PARAM, null);
        String subsStr = request.getParams().getProperty(SUBS_PARAM, null);
        String partitionInfoStr = request.getParams().getProperty(PARTITION_INFO_STRING);
        String streamFromLatestSCNStr = request.getParams().getProperty(STREAM_FROM_LATEST_SCN);
        String clientMaxEventVersionStr = request.getParams().getProperty(DatabusHttpHeaders.MAX_EVENT_VERSION);
        int clientEventVersion = (clientMaxEventVersionStr != null) ? Integer.parseInt(clientMaxEventVersionStr) : DbusEventFactory.DBUS_EVENT_V1;
        if (clientEventVersion < 0 || clientEventVersion == 1 || clientEventVersion > DbusEventFactory.DBUS_EVENT_V2) {
            throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.MAX_EVENT_VERSION, clientMaxEventVersionStr);
        }
        if (null == sourcesListStr && null == subsStr) {
            throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM + "|" + SUBS_PARAM, "null");
        }
        //TODO for now we separate the code paths to limit the impact on existing Databus 2 deployments (DDSDBUS-79)
        //We have to get rid of this eventually and have a single data path.
        boolean v2Mode = null == subsStr;
        DbusKeyCompositeFilter keyCompositeFilter = null;
        if (null != partitionInfoStr) {
            try {
                Map<Long, DbusKeyFilter> fMap = KeyFilterConfigJSONFactory.parseSrcIdFilterConfigMap(partitionInfoStr);
                keyCompositeFilter = new DbusKeyCompositeFilter();
                keyCompositeFilter.setFilterMap(fMap);
                if (isDebug)
                    LOG.debug("keyCompositeFilter is :" + keyCompositeFilter);
            } catch (Exception ex) {
                String msg = "Got exception while parsing partition Configs. PartitionInfo is:" + partitionInfoStr;
                LOG.error(msg, ex);
                throw new InvalidRequestParamValueException(COMMAND_NAME, PARTITION_INFO_STRING, partitionInfoStr);
            }
        }
        boolean streamFromLatestSCN = false;
        if (null != streamFromLatestSCNStr) {
            streamFromLatestSCN = Boolean.valueOf(streamFromLatestSCNStr);
        }
        long start = System.currentTimeMillis();
        List<DatabusSubscription> subs = null;
        //parse source ids
        SourceIdNameRegistry srcRegistry = _relay.getSourcesIdNameRegistry();
        HashSet<Integer> sourceIds = new HashSet<Integer>();
        if (null != sourcesListStr) {
            String[] sourcesList = sourcesListStr.split(",");
            for (String sourceId : sourcesList) {
                try {
                    Integer srcId = Integer.valueOf(sourceId);
                    sourceIds.add(srcId);
                } catch (NumberFormatException nfe) {
                    HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
                    if (null != globalHttpStatsCollector) {
                        globalHttpStatsCollector.registerInvalidStreamRequest();
                    }
                    throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
                }
            }
        }
        //process explicit subscriptions and generate respective logical partition filters
        NavigableSet<PhysicalPartitionKey> ppartKeys = null;
        if (null != subsStr) {
            List<DatabusSubscription.Builder> subsBuilder = null;
            subsBuilder = objMapper.readValue(subsStr, new TypeReference<List<DatabusSubscription.Builder>>() {
            });
            subs = new ArrayList<DatabusSubscription>(subsBuilder.size());
            for (DatabusSubscription.Builder subBuilder : subsBuilder) {
                subs.add(subBuilder.build());
            }
            ppartKeys = new TreeSet<PhysicalPartitionKey>();
            for (DatabusSubscription sub : subs) {
                PhysicalPartition ppart = sub.getPhysicalPartition();
                if (ppart.isAnyPartitionWildcard()) {
                    ppartKeys = _eventBuffer.getAllPhysicalPartitionKeys();
                    break;
                } else {
                    ppartKeys.add(new PhysicalPartitionKey(ppart));
                }
            }
        }
        // Subscriptions must be accompanied by a CheckpointMult, not a single Checkpoint.
        // Need to make sure that we don't have tests that send requests in this form.
        if (subs != null && checkpointStringMult == null && checkpointString != null) {
            throw new RequestProcessingException("Both Subscriptions and CheckpointMult should be present");
        }
        //convert source ids into subscriptions
        if (null == subs)
            subs = new ArrayList<DatabusSubscription>();
        for (Integer srcId : sourceIds) {
            LogicalSource lsource = srcRegistry.getSource(srcId);
            if (lsource == null)
                throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, srcId.toString());
            if (isDebug)
                LOG.debug("registry returns " + lsource + " for srcid=" + srcId);
            DatabusSubscription newSub = DatabusSubscription.createSimpleSourceSubscription(lsource);
            subs.add(newSub);
        }
        DbusFilter ppartFilters = null;
        if (subs.size() > 0) {
            try {
                ppartFilters = _eventBuffer.constructFilters(subs);
            } catch (DatabusException de) {
                throw new RequestProcessingException("unable to generate physical partitions filters:" + de.getMessage(), de);
            }
        }
        ConjunctionDbusFilter filters = new ConjunctionDbusFilter();
        // Source filter comes first
        if (v2Mode)
            filters.addFilter(new SourceDbusFilter(sourceIds));
        else if (null != ppartFilters)
            filters.addFilter(ppartFilters);
        /*
      // Key range filter comes next
      if ((keyMin >0) && (keyMax > 0))
      {
        filters.addFilter(new KeyRangeFilter(keyMin, keyMax));
      }
      */
        if (null != keyCompositeFilter) {
            filters.addFilter(keyCompositeFilter);
        }
        // need to update registerStreamRequest to support Mult checkpoint TODO (DDSDBUS-80)
        // temp solution
        // 3 options:
        // 1. checkpointStringMult not null - generate checkpoint from it
        // 2. checkpointStringMult null, checkpointString not null - create an empty CheckpointMult,
        //    create a Checkpoint(checkpointString) and add it to cpMult;
        // 3. both are null - create an empty CheckpointMult and add an empty Checkpoint to it for each ppartition
        PhysicalPartition pPartition;
        Checkpoint cp = null;
        CheckpointMult cpMult = null;
        if (checkpointStringMult != null) {
            try {
                cpMult = new CheckpointMult(checkpointStringMult);
            } catch (InvalidParameterSpecException e) {
                LOG.error("Invalid CheckpointMult:" + checkpointStringMult, e);
                throw new InvalidRequestParamValueException("stream", "CheckpointMult", checkpointStringMult);
            }
        } else {
            // there is no checkpoint - create an empty one
            cpMult = new CheckpointMult();
            Iterator<Integer> it = sourceIds.iterator();
            while (it.hasNext()) {
                Integer srcId = it.next();
                pPartition = _eventBuffer.getPhysicalPartition(srcId);
                if (pPartition == null)
                    throw new RequestProcessingException("unable to find physical partitions for source:" + srcId);
                if (checkpointString != null) {
                    cp = new Checkpoint(checkpointString);
                } else {
                    cp = new Checkpoint();
                    cp.setFlexible();
                }
                cpMult.addCheckpoint(pPartition, cp);
            }
        }
        if (isDebug)
            LOG.debug("checkpointStringMult = " + checkpointStringMult + ";singlecheckpointString=" + checkpointString + ";CPM=" + cpMult);
        // If the CheckpointMult does not carry a cursor partition, fall back to the one tracked as part of the server context (on the request).
        if (cpMult.getCursorPartition() == null) {
            cpMult.setCursorPartition(request.getCursorPartition());
        }
        if (isDebug) {
            if (cpMult.getCursorPartition() != null) {
                LOG.debug("Using physical paritition cursor " + cpMult.getCursorPartition());
            }
        }
        // for registerStreamRequest we need a single Checkpoint (TODO - fix it) (DDSDBUS-81)
        if (cp == null) {
            Iterator<Integer> it = sourceIds.iterator();
            if (it.hasNext()) {
                Integer srcId = it.next();
                pPartition = _eventBuffer.getPhysicalPartition(srcId);
                cp = cpMult.getCheckpoint(pPartition);
            } else {
                cp = new Checkpoint();
                cp.setFlexible();
            }
        }
        if (null != checkpointString && isDebug)
            LOG.debug("About to stream from cp: " + checkpointString.toString());
        HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
        HttpStatisticsCollector connHttpStatsCollector = null;
        if (null != globalHttpStatsCollector) {
            connHttpStatsCollector = (HttpStatisticsCollector) request.getParams().get(globalHttpStatsCollector.getName());
        }
        if (null != globalHttpStatsCollector)
            globalHttpStatsCollector.registerStreamRequest(cp, sourceIds);
        StatsCollectors<DbusEventsStatisticsCollector> statsCollectors = _relay.getOutBoundStatsCollectors();
        try {
            DbusEventBufferBatchReadable bufRead = v2Mode ? _eventBuffer.getDbusEventBufferBatchReadable(sourceIds, cpMult, statsCollectors) : _eventBuffer.getDbusEventBufferBatchReadable(cpMult, ppartKeys, statsCollectors);
            int eventsRead = 0;
            int minPendingEventSize = 0;
            StreamEventsResult result = null;
            bufRead.setClientMaxEventVersion(clientEventVersion);
            if (v2Mode) {
                result = bufRead.streamEvents(streamFromLatestSCN, fetchSize, request.getResponseContent(), enc, filters);
                eventsRead = result.getNumEventsStreamed();
                minPendingEventSize = result.getSizeOfPendingEvent();
                if (isDebug) {
                    LOG.debug("Process: streamed " + eventsRead + " from sources " + Arrays.toString(sourceIds.toArray()));
                    //can be used for debugging to stream from a cp
                    LOG.debug("CP=" + cpMult);
                }
            //if (null != statsCollectors) statsCollectors.mergeStatsCollectors();
            } else {
                result = bufRead.streamEvents(streamFromLatestSCN, fetchSize, request.getResponseContent(), enc, filters);
                eventsRead = result.getNumEventsStreamed();
                minPendingEventSize = result.getSizeOfPendingEvent();
                if (isDebug)
                    LOG.debug("Process: streamed " + eventsRead + " with subscriptions " + subs);
                cpMult = bufRead.getCheckpointMult();
                if (cpMult != null) {
                    request.setCursorPartition(cpMult.getCursorPartition());
                }
            }
            if (eventsRead == 0 && minPendingEventSize > 0) {
                // Append a header to indicate to the client that we do have at least one event to
                // send, but it is too large to fit into client's offered buffer.
                request.getResponseContent().addMetadata(DatabusHttpHeaders.DATABUS_PENDING_EVENT_SIZE, minPendingEventSize);
                LOG.debug("Returning 0 events but have pending event of size " + minPendingEventSize);
            }
        } catch (ScnNotFoundException snfe) {
            if (null != globalHttpStatsCollector) {
                globalHttpStatsCollector.registerScnNotFoundStreamResponse();
            }
            throw new RequestProcessingException(snfe);
        } catch (OffsetNotFoundException snfe) {
            LOG.error("OffsetNotFound", snfe);
            if (null != globalHttpStatsCollector) {
                globalHttpStatsCollector.registerScnNotFoundStreamResponse();
            }
            throw new RequestProcessingException(snfe);
        }
        if (null != connHttpStatsCollector) {
            connHttpStatsCollector.registerStreamResponse(System.currentTimeMillis() - start);
            globalHttpStatsCollector.merge(connHttpStatsCollector);
            connHttpStatsCollector.reset();
        } else if (null != globalHttpStatsCollector) {
            globalHttpStatsCollector.registerStreamResponse(System.currentTimeMillis() - start);
        }
    } catch (InvalidRequestParamValueException e) {
        HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
        if (null != globalHttpStatsCollector) {
            globalHttpStatsCollector.registerInvalidStreamRequest();
        }
        throw e;
    }
    return request;
}
Also used : CheckpointMult(com.linkedin.databus.core.CheckpointMult) SourceDbusFilter(com.linkedin.databus2.core.filter.SourceDbusFilter) ArrayList(java.util.ArrayList) DbusEventsStatisticsCollector(com.linkedin.databus.core.monitoring.mbean.DbusEventsStatisticsCollector) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) HttpStatisticsCollector(com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector) ScnNotFoundException(com.linkedin.databus.core.ScnNotFoundException) RequestProcessingException(com.linkedin.databus2.core.container.request.RequestProcessingException) TypeReference(org.codehaus.jackson.type.TypeReference) InvalidParameterSpecException(java.security.spec.InvalidParameterSpecException) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) PhysicalPartition(com.linkedin.databus.core.data_model.PhysicalPartition) HashSet(java.util.HashSet) StreamEventsResult(com.linkedin.databus.core.StreamEventsResult) Encoding(com.linkedin.databus.core.Encoding) SourceIdNameRegistry(com.linkedin.databus2.schemas.SourceIdNameRegistry) DatabusSubscription(com.linkedin.databus.core.data_model.DatabusSubscription) ConjunctionDbusFilter(com.linkedin.databus2.core.filter.ConjunctionDbusFilter) DbusFilter(com.linkedin.databus2.core.filter.DbusFilter) InvalidRequestParamValueException(com.linkedin.databus2.core.container.request.InvalidRequestParamValueException) Checkpoint(com.linkedin.databus.core.Checkpoint) DbusKeyFilter(com.linkedin.databus2.core.filter.DbusKeyFilter) OffsetNotFoundException(com.linkedin.databus.core.OffsetNotFoundException) DatabusException(com.linkedin.databus2.core.DatabusException) IOException(java.io.IOException) DbusEventBufferBatchReadable(com.linkedin.databus.core.DbusEventBufferBatchReadable) PhysicalPartitionKey(com.linkedin.databus.core.DbusEventBufferMult.PhysicalPartitionKey) DbusKeyCompositeFilter(com.linkedin.databus2.core.filter.DbusKeyCompositeFilter)
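
Before streaming, the handler chains the source or physical-partition filter together with the optional key filter inside a single ConjunctionDbusFilter, so an event is emitted only if every added filter accepts it. A minimal standalone sketch of that conjunction pattern (hypothetical EventFilter types, not the actual databus interfaces):

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for DbusFilter: decides whether a single event passes.
interface EventFilter {
    boolean allows(long key, int sourceId);
}

// Hypothetical stand-in for ConjunctionDbusFilter: the event must satisfy every added filter.
class ConjunctionFilter implements EventFilter {
    private final List<EventFilter> filters = new ArrayList<EventFilter>();

    void addFilter(EventFilter f) {
        filters.add(f);
    }

    @Override
    public boolean allows(long key, int sourceId) {
        for (EventFilter f : filters) {
            if (!f.allows(key, sourceId)) {
                return false;     // first rejection short-circuits
            }
        }
        return true;              // an empty conjunction lets everything through
    }
}

In the code above, the conjunction receives a SourceDbusFilter (v2 mode) or the physical-partition filters, plus the DbusKeyCompositeFilter when partition info was supplied, before being handed to streamEvents.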

Example 3 with DbusFilter

Use of com.linkedin.databus2.core.filter.DbusFilter in the project databus by LinkedIn.

From the class TestDbusEventBufferMult, the method testSubscriptionStream:

@Test
public void testSubscriptionStream() throws Exception {
    final Logger log = Logger.getLogger("TestDbusEventBufferMult.testSubscriptionStream");
    log.info("start");
    TestSetup t = new TestSetup();
    PhysicalPartition pp100 = new PhysicalPartition(100, "multBufferTest1");
    PhysicalPartitionKey pk1 = new PhysicalPartitionKey(pp100);
    PhysicalPartition pp101 = new PhysicalPartition(101, "multBufferTest2");
    PhysicalPartitionKey pk2 = new PhysicalPartitionKey(pp101);
    //generate events in pp100
    byte[] schema = "abcdefghijklmnop".getBytes(Charset.defaultCharset());
    DbusEventBufferAppendable buf100 = t._eventBuffer.getDbusEventBufferAppendable(pp100);
    buf100.startEvents();
    assertTrue(buf100.appendEvent(new DbusEventKey(1), (short) 100, (short) 0, System.currentTimeMillis() * 1000000, (short) 1, schema, new byte[100], false, null));
    assertTrue(buf100.appendEvent(new DbusEventKey(10), (short) 100, (short) 0, System.currentTimeMillis() * 1000000, (short) 1, schema, new byte[100], false, null));
    assertTrue(buf100.appendEvent(new DbusEventKey(11), (short) 100, (short) 0, System.currentTimeMillis() * 1000000, (short) 1, schema, new byte[100], false, null));
    assertTrue(buf100.appendEvent(new DbusEventKey(2), (short) 100, (short) 0, System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
    buf100.endEvents(100, null);
    buf100.startEvents();
    assertTrue(buf100.appendEvent(new DbusEventKey(3), (short) 100, (short) 0, System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
    assertTrue(buf100.appendEvent(new DbusEventKey(4), (short) 100, (short) 1, System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
    buf100.endEvents(200, null);
    //generate events in pp101
    DbusEventBufferAppendable buf101 = t._eventBuffer.getDbusEventBufferAppendable(pp101);
    buf101.startEvents();
    assertTrue(buf101.appendEvent(new DbusEventKey(51), (short) 101, (short) 0, System.currentTimeMillis() * 1000000, (short) 11, schema, new byte[100], false, null));
    assertTrue(buf101.appendEvent(new DbusEventKey(52), (short) 101, (short) 0, System.currentTimeMillis() * 1000000, (short) 12, schema, new byte[100], false, null));
    assertTrue(buf101.appendEvent(new DbusEventKey(53), (short) 101, (short) 2, System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
    buf101.endEvents(120, null);
    buf101.startEvents();
    assertTrue(buf101.appendEvent(new DbusEventKey(54), (short) 101, (short) 2, System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
    assertTrue(buf101.appendEvent(new DbusEventKey(55), (short) 101, (short) 2, System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
    assertTrue(buf101.appendEvent(new DbusEventKey(56), (short) 101, (short) 2, System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
    buf101.endEvents(200, null);
    //initialization
    DatabusSubscription sub1 = DatabusSubscription.createPhysicalPartitionReplicationSubscription(new PhysicalPartition(100, "multBufferTest1"));
    DbusFilter filter1 = t._eventBuffer.constructFilters(Arrays.asList(sub1));
    assertNotNull(filter1);
    CheckpointMult cpMult1 = new CheckpointMult();
    Checkpoint cp100 = new Checkpoint();
    cp100.init();
    cp100.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
    cp100.setWindowScn(10L);
    cp100.setWindowOffset(-1);
    cpMult1.addCheckpoint(pp100, cp100);
    String[] pnames = { "multBufferTest1:100", "multBufferTest2:101" };
    StatsCollectors<DbusEventsStatisticsCollector> statsColls1 = createStats(pnames);
    DbusEventsStatisticsCollector statsCol1 = statsColls1.getStatsCollector("multBufferTest1:100");
    DbusEventsStatisticsCollector statsCol2 = statsColls1.getStatsCollector("multBufferTest2:101");
    //read an entire buffer
    DbusEventBufferBatchReadable reader1 = t._eventBuffer.getDbusEventBufferBatchReadable(cpMult1, Arrays.asList(pk1), statsColls1);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // Try a call with 20 bytes of fetch size, we should see the event size in the first return with 0 events read.
    StreamEventsResult result = reader1.streamEvents(false, 20, Channels.newChannel(baos), Encoding.BINARY, filter1);
    assertEquals(0, result.getNumEventsStreamed());
    assertEquals(161, result.getSizeOfPendingEvent());
    result = reader1.streamEvents(false, 1000000, Channels.newChannel(baos), Encoding.BINARY, filter1);
    int eventsRead = result.getNumEventsStreamed();
    //4 events + 1 eop + 2 events + 1 eop
    assertEquals(eventsRead, 8);
    assertEquals(statsColls1.getStatsCollector("multBufferTest1:100").getTotalStats().getNumSysEvents(), 2);
    assertEquals(statsColls1.getStatsCollector("multBufferTest1:100").getTotalStats().getNumDataEvents(), 6);
    assertEquals(result.getSizeOfPendingEvent(), 0, "Size of pending event not zero");
    // Now that we have read all the events, we should not see a pending event even if we offer a small fetch size.
    result = reader1.streamEvents(false, 20, Channels.newChannel(baos), Encoding.BINARY, filter1);
    assertEquals(0, result.getNumEventsStreamed(), "There should be no more events in the buffer now");
    assertEquals(0, result.getSizeOfPendingEvent(), "We should not see pending event size since there are no events in buffer");
    baos.reset();
    statsCol1.reset();
    statsCol2.reset();
    //read from two buffers, filtering out one
    cpMult1 = new CheckpointMult();
    cp100.init();
    cp100.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
    cp100.setWindowScn(10L);
    cp100.setWindowOffset(-1);
    cpMult1.addCheckpoint(pp100, cp100);
    reader1 = t._eventBuffer.getDbusEventBufferBatchReadable(cpMult1, Arrays.asList(pk1, pk2), statsColls1);
    eventsRead = reader1.streamEvents(false, 1000000, Channels.newChannel(baos), Encoding.BINARY, filter1).getNumEventsStreamed();
    //4 events + 1 eop + 1 eop from the other buffer + 2 events + 1 eop + 1 eop from the other buffer
    assertEquals(eventsRead, 10);
    assertEquals(statsColls1.getStatsCollector("multBufferTest1:100").getTotalStats().getNumSysEvents(), 2);
    assertEquals(statsColls1.getStatsCollector("multBufferTest1:100").getTotalStats().getNumDataEvents(), 6);
    baos.reset();
    statsCol1.reset();
    //read from one buffer and one source partition
    DatabusSubscription sub2 = new DatabusSubscription(PhysicalSource.MASTER_PHISYCAL_SOURCE, new PhysicalPartition(101, "multBufferTest2"), new LogicalSourceId(new LogicalSource(2, "srcName2"), (short) 2));
    DbusFilter filter2 = t._eventBuffer.constructFilters(Arrays.asList(sub2));
    assertNotNull(filter2);
    CheckpointMult cpMult2 = new CheckpointMult();
    Checkpoint cp101 = new Checkpoint();
    cp101.init();
    cp101.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
    cp101.setWindowScn(10L);
    cp101.setWindowOffset(-1);
    cpMult2.addCheckpoint(pp101, cp101);
    DbusEventBufferBatchReadable reader2 = t._eventBuffer.getDbusEventBufferBatchReadable(cpMult2, Arrays.asList(pk2), statsColls1);
    eventsRead = reader2.streamEvents(false, 1000000, Channels.newChannel(baos), Encoding.BINARY, filter2).getNumEventsStreamed();
    //1 events + 1 eop + 3events + 1 eop
    assertEquals(eventsRead, 6);
    baos.reset();
    statsCol1.reset();
    statsCol2.reset();
    //read all partitions for a source
    DatabusSubscription sub3 = new DatabusSubscription(PhysicalSource.MASTER_PHISYCAL_SOURCE, PhysicalPartition.ANY_PHYSICAL_PARTITION, LogicalSourceId.createAllPartitionsWildcard(new LogicalSource(2, "srcName2")));
    DbusFilter filter3 = t._eventBuffer.constructFilters(Arrays.asList(sub3));
    assertNotNull(filter3);
    CheckpointMult cpMult3 = new CheckpointMult();
    cp100.init();
    cp100.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
    cp100.setWindowScn(10L);
    cp100.setWindowOffset(-1);
    cpMult1.addCheckpoint(pp100, cp100);
    cp101.init();
    cp101.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
    cp101.setWindowScn(10L);
    cp101.setWindowOffset(-1);
    cpMult2.addCheckpoint(pp101, cp101);
    DbusEventBufferBatchReadable reader3 = t._eventBuffer.getDbusEventBufferBatchReadable(cpMult3, Arrays.asList(pk1, pk2), statsColls1);
    eventsRead = reader3.streamEvents(false, 1000000, Channels.newChannel(baos), Encoding.BINARY, filter3).getNumEventsStreamed();
    //1 events + 1 eop + 1 events + 1 eop + 2 events + 1 eop + 3 events + 1 eop
    assertEquals(eventsRead, 11);
    assertEquals(statsColls1.getStatsCollector("multBufferTest1:100").getTotalStats().getNumSysEvents(), 2);
    assertEquals(statsColls1.getStatsCollector("multBufferTest2:101").getTotalStats().getNumSysEvents(), 2);
    assertEquals(statsColls1.getStatsCollector("multBufferTest1:100").getTotalStats().getNumDataEventsFiltered(), 3);
    assertEquals(statsColls1.getStatsCollector("multBufferTest2:101").getTotalStats().getNumDataEventsFiltered(), 4);
    baos.reset();
    statsCol1.reset();
    statsCol2.reset();
    log.info("end");
}
Also used : LogicalSourceId(com.linkedin.databus.core.data_model.LogicalSourceId) DbusEventsStatisticsCollector(com.linkedin.databus.core.monitoring.mbean.DbusEventsStatisticsCollector) AggregatedDbusEventsStatisticsCollector(com.linkedin.databus.core.monitoring.mbean.AggregatedDbusEventsStatisticsCollector) ByteArrayOutputStream(java.io.ByteArrayOutputStream) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) Logger(org.apache.log4j.Logger) DatabusSubscription(com.linkedin.databus.core.data_model.DatabusSubscription) ConjunctionDbusFilter(com.linkedin.databus2.core.filter.ConjunctionDbusFilter) SourceDbusFilter(com.linkedin.databus2.core.filter.SourceDbusFilter) AllowAllDbusFilter(com.linkedin.databus2.core.filter.AllowAllDbusFilter) LogicalSourceAndPartitionDbusFilter(com.linkedin.databus2.core.filter.LogicalSourceAndPartitionDbusFilter) PhysicalPartitionDbusFilter(com.linkedin.databus2.core.filter.PhysicalPartitionDbusFilter) DbusFilter(com.linkedin.databus2.core.filter.DbusFilter) PhysicalPartitionKey(com.linkedin.databus.core.DbusEventBufferMult.PhysicalPartitionKey) PhysicalPartition(com.linkedin.databus.core.data_model.PhysicalPartition) Test(org.testng.annotations.Test) BeforeTest(org.testng.annotations.BeforeTest)

Example 4 with DbusFilter

Use of com.linkedin.databus2.core.filter.DbusFilter in the project databus by LinkedIn.

From the class TestDbusEventBufferMult, the method testConstructFilters:

@Test
public void testConstructFilters() throws Exception {
    TestSetup t = new TestSetup();
    //test single Physical Partition subscription
    DatabusSubscription sub1 = DatabusSubscription.createPhysicalPartitionReplicationSubscription(new PhysicalPartition(100, "multBufferTest1"));
    DbusFilter filter1 = t._eventBuffer.constructFilters(Arrays.asList(sub1));
    assertNotNull(filter1);
    assertTrue(filter1 instanceof PhysicalPartitionDbusFilter);
    PhysicalPartitionDbusFilter ppfilter1 = (PhysicalPartitionDbusFilter) filter1;
    assertEquals(ppfilter1.getPhysicalPartition(), new PhysicalPartition(100, "multBufferTest1"));
    assertNull(ppfilter1.getNestedFilter());
    DatabusSubscription sub2 = DatabusSubscription.createPhysicalPartitionReplicationSubscription(new PhysicalPartition(101, "multBufferTest2"));
    //test two Physical Partition subscriptions
    DbusFilter filter2 = t._eventBuffer.constructFilters(Arrays.asList(sub1, sub2));
    assertNotNull(filter2);
    assertTrue(filter2 instanceof ConjunctionDbusFilter);
    ConjunctionDbusFilter conjFilter2 = (ConjunctionDbusFilter) filter2;
    boolean hasPP100 = false;
    boolean hasPP101 = false;
    assertEquals(conjFilter2.getFilterList().size(), 2);
    for (DbusFilter f : conjFilter2.getFilterList()) {
        assertTrue(f instanceof PhysicalPartitionDbusFilter);
        PhysicalPartitionDbusFilter ppf = (PhysicalPartitionDbusFilter) f;
        if (ppf.getPhysicalPartition().getId() == 100)
            hasPP100 = true;
        else if (ppf.getPhysicalPartition().getId() == 101)
            hasPP101 = true;
        else
            fail("unknown physical partition filter:" + ppf.getPhysicalPartition());
    }
    assertTrue(hasPP100);
    assertTrue(hasPP101);
    //test a subscription with a logical source
    DatabusSubscription sub3 = DatabusSubscription.createSimpleSourceSubscription(new LogicalSource(2, "srcName2"));
    DbusFilter filter3 = t._eventBuffer.constructFilters(Arrays.asList(sub3));
    assertNotNull(filter3);
    assertTrue(filter3 instanceof PhysicalPartitionDbusFilter);
    PhysicalPartitionDbusFilter ppfilter3 = (PhysicalPartitionDbusFilter) filter3;
    assertEquals(ppfilter3.getPhysicalPartition(), PhysicalPartition.ANY_PHYSICAL_PARTITION);
    DbusFilter ppfilter3_child = ppfilter3.getNestedFilter();
    assertNotNull(ppfilter3_child);
    assertTrue(ppfilter3_child instanceof LogicalSourceAndPartitionDbusFilter);
    LogicalSourceAndPartitionDbusFilter lsourceFilter3 = (LogicalSourceAndPartitionDbusFilter) ppfilter3_child;
    LogicalSourceAndPartitionDbusFilter.LogicalPartitionDbusFilter lpartFilter3_1 = lsourceFilter3.getSourceFilter(2);
    assertNotNull(lpartFilter3_1);
    assertTrue(lpartFilter3_1.isAllPartitionsWildcard());
    //test a subscription with a physical and logical partition
    DatabusSubscription sub4 = new DatabusSubscription(PhysicalSource.MASTER_PHISYCAL_SOURCE, new PhysicalPartition(101, "multBufferTest2"), new LogicalSourceId(new LogicalSource(2, "srcName2"), (short) 2));
    DbusFilter filter4 = t._eventBuffer.constructFilters(Arrays.asList(sub4));
    assertNotNull(filter4);
    assertTrue(filter4 instanceof PhysicalPartitionDbusFilter);
    PhysicalPartitionDbusFilter ppfilter4 = (PhysicalPartitionDbusFilter) filter4;
    assertEquals(ppfilter4.getPhysicalPartition(), new PhysicalPartition(101, "multBufferTest2"));
    DbusFilter ppfilter4_child = ppfilter4.getNestedFilter();
    assertNotNull(ppfilter4_child);
    assertTrue(ppfilter4_child instanceof LogicalSourceAndPartitionDbusFilter);
    LogicalSourceAndPartitionDbusFilter lsourceFilter4 = (LogicalSourceAndPartitionDbusFilter) ppfilter4_child;
    LogicalSourceAndPartitionDbusFilter.LogicalPartitionDbusFilter lpartFilter4_1 = lsourceFilter4.getSourceFilter(2);
    assertNotNull(lpartFilter4_1);
    assertTrue(lpartFilter4_1.getPartitionsMask().contains(2));
}
Also used : PhysicalPartitionDbusFilter(com.linkedin.databus2.core.filter.PhysicalPartitionDbusFilter) ConjunctionDbusFilter(com.linkedin.databus2.core.filter.ConjunctionDbusFilter) LogicalSourceId(com.linkedin.databus.core.data_model.LogicalSourceId) LogicalSourceAndPartitionDbusFilter(com.linkedin.databus2.core.filter.LogicalSourceAndPartitionDbusFilter) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) DatabusSubscription(com.linkedin.databus.core.data_model.DatabusSubscription) SourceDbusFilter(com.linkedin.databus2.core.filter.SourceDbusFilter) AllowAllDbusFilter(com.linkedin.databus2.core.filter.AllowAllDbusFilter) DbusFilter(com.linkedin.databus2.core.filter.DbusFilter) PhysicalPartition(com.linkedin.databus.core.data_model.PhysicalPartition) Test(org.testng.annotations.Test) BeforeTest(org.testng.annotations.BeforeTest)

Example 5 with DbusFilter

Use of com.linkedin.databus2.core.filter.DbusFilter in the project databus by LinkedIn.

From the class DbusEventBufferMult, the method constructFilters:

/**
   * Processes all {@link DatabusSubscription}s and generates a filter that matches events for any of
   * those subscriptions.
   */
public DbusFilter constructFilters(Collection<DatabusSubscription> subs) throws DatabusException {
    HashMap<PhysicalPartition, PhysicalPartitionDbusFilter> filterMap = null;
    for (DatabusSubscription sub : subs) {
        PhysicalPartition ppart = sub.getPhysicalPartition();
        if (sub.getLogicalSource().isWildcard()) {
            if (!ppart.isWildcard()) {
                if (null == filterMap)
                    filterMap = new HashMap<PhysicalPartition, PhysicalPartitionDbusFilter>(10);
                filterMap.put(ppart, new PhysicalPartitionDbusFilter(ppart, null));
            } else {
                LOG.warn("ignoring subscription with both physical partition and logical source wildcards");
            }
        } else {
            PhysicalPartitionDbusFilter ppartFilter = null != filterMap ? filterMap.get(ppart) : null;
            LogicalSourceAndPartitionDbusFilter logFilter = null;
            if (null == ppartFilter) {
                logFilter = new LogicalSourceAndPartitionDbusFilter();
                ppartFilter = new PhysicalPartitionDbusFilter(ppart, logFilter);
                if (null == filterMap)
                    filterMap = new HashMap<PhysicalPartition, PhysicalPartitionDbusFilter>(10);
                filterMap.put(ppart, ppartFilter);
            } else {
                logFilter = (LogicalSourceAndPartitionDbusFilter) ppartFilter.getNestedFilter();
            }
            if (null != logFilter)
                logFilter.addSourceCondition(sub.getLogicalPartition());
            else
                LOG.error("unexpected null filter for logical source");
        }
    }
    // filterMap stays null when subs is empty or contains only fully wildcarded subscriptions; treat that as allow-all.
    if (null == filterMap || 0 == filterMap.size())
        return AllowAllDbusFilter.THE_INSTANCE;
    else if (1 == filterMap.size()) {
        DbusFilter result = filterMap.entrySet().iterator().next().getValue();
        return result;
    } else {
        ConjunctionDbusFilter result = new ConjunctionDbusFilter();
        for (Map.Entry<PhysicalPartition, PhysicalPartitionDbusFilter> filterEntry : filterMap.entrySet()) {
            result.addFilter(filterEntry.getValue());
        }
        return result;
    }
}
Also used : PhysicalPartitionDbusFilter(com.linkedin.databus2.core.filter.PhysicalPartitionDbusFilter) HashMap(java.util.HashMap) ConjunctionDbusFilter(com.linkedin.databus2.core.filter.ConjunctionDbusFilter) LogicalSourceAndPartitionDbusFilter(com.linkedin.databus2.core.filter.LogicalSourceAndPartitionDbusFilter) DatabusSubscription(com.linkedin.databus.core.data_model.DatabusSubscription) AllowAllDbusFilter(com.linkedin.databus2.core.filter.AllowAllDbusFilter) DbusFilter(com.linkedin.databus2.core.filter.DbusFilter) PhysicalPartition(com.linkedin.databus.core.data_model.PhysicalPartition)
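
Putting it together, callers build DatabusSubscription objects and let the buffer derive the matching DbusFilter. A brief usage sketch, assuming bufMult is an already-configured DbusEventBufferMult; the class and method names here are only for illustration, while the factory methods are the same ones exercised in the tests above:

import java.util.Arrays;
import java.util.List;

import com.linkedin.databus.core.DbusEventBufferMult;
import com.linkedin.databus.core.data_model.DatabusSubscription;
import com.linkedin.databus.core.data_model.LogicalSource;
import com.linkedin.databus.core.data_model.PhysicalPartition;
import com.linkedin.databus2.core.DatabusException;
import com.linkedin.databus2.core.filter.DbusFilter;

public class ConstructFiltersSketch {
    // bufMult is assumed to be an existing, fully configured DbusEventBufferMult instance.
    static DbusFilter filterFor(DbusEventBufferMult bufMult) throws DatabusException {
        List<DatabusSubscription> subs = Arrays.asList(
            // the whole physical partition 100 of "multBufferTest1"
            DatabusSubscription.createPhysicalPartitionReplicationSubscription(
                new PhysicalPartition(100, "multBufferTest1")),
            // every partition of logical source 2 ("srcName2"), any physical partition
            DatabusSubscription.createSimpleSourceSubscription(new LogicalSource(2, "srcName2")));
        // A single map entry yields a PhysicalPartitionDbusFilter; several distinct physical
        // partitions yield a ConjunctionDbusFilter over them; with no non-wildcard entries,
        // constructFilters falls back to AllowAllDbusFilter.
        return bufMult.constructFilters(subs);
    }
}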

Aggregations

DbusFilter (com.linkedin.databus2.core.filter.DbusFilter): 5
DatabusSubscription (com.linkedin.databus.core.data_model.DatabusSubscription): 4
PhysicalPartition (com.linkedin.databus.core.data_model.PhysicalPartition): 4
ConjunctionDbusFilter (com.linkedin.databus2.core.filter.ConjunctionDbusFilter): 4
LogicalSource (com.linkedin.databus.core.data_model.LogicalSource): 3
AllowAllDbusFilter (com.linkedin.databus2.core.filter.AllowAllDbusFilter): 3
LogicalSourceAndPartitionDbusFilter (com.linkedin.databus2.core.filter.LogicalSourceAndPartitionDbusFilter): 3
PhysicalPartitionDbusFilter (com.linkedin.databus2.core.filter.PhysicalPartitionDbusFilter): 3
SourceDbusFilter (com.linkedin.databus2.core.filter.SourceDbusFilter): 3
Checkpoint (com.linkedin.databus.core.Checkpoint): 2
PhysicalPartitionKey (com.linkedin.databus.core.DbusEventBufferMult.PhysicalPartitionKey): 2
LogicalSourceId (com.linkedin.databus.core.data_model.LogicalSourceId): 2
DbusEventsStatisticsCollector (com.linkedin.databus.core.monitoring.mbean.DbusEventsStatisticsCollector): 2
ArrayList (java.util.ArrayList): 2
BeforeTest (org.testng.annotations.BeforeTest): 2
Test (org.testng.annotations.Test): 2
CheckpointMult (com.linkedin.databus.core.CheckpointMult): 1
DbusEventBufferBatchReadable (com.linkedin.databus.core.DbusEventBufferBatchReadable): 1
Encoding (com.linkedin.databus.core.Encoding): 1
OffsetNotFoundException (com.linkedin.databus.core.OffsetNotFoundException): 1