Search in sources:

Example 51 with ConsumerConfig

Use of co.cask.cdap.data2.queue.ConsumerConfig in project cdap by caskdata.

From the class DequeueScanObserver, the method preScannerOpen:

@Override
public RegionScanner preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> e, Scan scan, RegionScanner s) throws IOException {
    ConsumerConfig consumerConfig = DequeueScanAttributes.getConsumerConfig(scan);
    Transaction tx = DequeueScanAttributes.getTx(scan);
    // Not a dequeue scan (no consumer config or transaction attached to the Scan): pass it through untouched.
    if (consumerConfig == null || tx == null) {
        return super.preScannerOpen(e, scan, s);
    }
    Filter dequeueFilter = new DequeueFilter(consumerConfig, tx);
    Filter existing = scan.getFilter();
    if (existing != null) {
        // Preserve the caller's filter: a cell must pass both it and the dequeue filter.
        Filter combined = new FilterList(FilterList.Operator.MUST_PASS_ALL, existing, dequeueFilter);
        scan.setFilter(combined);
    } else {
        scan.setFilter(dequeueFilter);
    }
    return super.preScannerOpen(e, scan, s);
}
Also used: Transaction (org.apache.tephra.Transaction), Filter (org.apache.hadoop.hbase.filter.Filter), ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig), FilterList (org.apache.hadoop.hbase.filter.FilterList)
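
The pattern worth noting here is non-destructive filter composition: rather than overwriting a filter the client may have set on the Scan, the coprocessor wraps both in a MUST_PASS_ALL FilterList. Below is a minimal standalone sketch of the same pattern using only stock HBase classes; PrefixFilter stands in for the CDAP-internal DequeueFilter, and all names are illustrative, not part of the example above.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public final class FilterComposition {

    // Adds 'extra' to the scan without discarding any filter already set on it.
    static void addFilter(Scan scan, Filter extra) {
        Filter existing = scan.getFilter();
        if (existing != null) {
            // A cell must pass both the existing filter and the new one.
            scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, existing, extra));
        } else {
            scan.setFilter(extra);
        }
    }

    public static void main(String[] args) {
        Scan scan = new Scan();
        scan.setFilter(new PrefixFilter(Bytes.toBytes("q.")));
        addFilter(scan, new PrefixFilter(Bytes.toBytes("q.stream")));
        // Prints the combined FilterList wrapping both prefix filters.
        System.out.println(scan.getFilter());
    }
}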

Example 52 with ConsumerConfig

Use of co.cask.cdap.data2.queue.ConsumerConfig in project cdap by caskdata.

From the class FlowletProgramRunner, the method createProcessSpecification:

/**
   * Creates all {@link ProcessSpecification} for the process methods of the flowlet class.
   *
   * @param flowletType Type of the flowlet class represented by {@link TypeToken}.
   * @param processMethodFactory A {@link ProcessMethodFactory} for creating {@link ProcessMethod}.
   * @param processSpecFactory A {@link ProcessSpecificationFactory} for creating {@link ProcessSpecification}.
   * @param result A {@link Collection} for storing newly created {@link ProcessSpecification}.
   * @return The same {@link Collection} as the {@code result} parameter.
   */
@SuppressWarnings("unchecked")
private <T extends Collection<ProcessSpecification<?>>> T createProcessSpecification(BasicFlowletContext flowletContext, TypeToken<? extends Flowlet> flowletType, ProcessMethodFactory processMethodFactory, ProcessSpecificationFactory processSpecFactory, T result) throws Exception {
    Set<FlowletMethod> seenMethods = Sets.newHashSet();
    // Walk up the hierarchy of flowlet class to get all ProcessInput and Tick methods
    for (TypeToken<?> type : flowletType.getTypes().classes()) {
        if (type.getRawType().equals(Object.class)) {
            break;
        }
        // Extracts all process and tick methods
        for (Method method : type.getRawType().getDeclaredMethods()) {
            if (method.isSynthetic() || method.isBridge()) {
                continue;
            }
            if (!seenMethods.add(FlowletMethod.create(method, flowletType.getType()))) {
                // Already seen: a subclass override was visited first, so skip the
                // overridden version encountered further up the class hierarchy.
                continue;
            }
            ProcessInput processInputAnnotation = method.getAnnotation(ProcessInput.class);
            Tick tickAnnotation = method.getAnnotation(Tick.class);
            if (processInputAnnotation == null && tickAnnotation == null) {
                // Neither a process nor a tick method.
                continue;
            }
            int maxRetries = (tickAnnotation == null) ? processInputAnnotation.maxRetries() : tickAnnotation.maxRetries();
            ProcessMethod processMethod = processMethodFactory.create(method, maxRetries);
            Set<String> inputNames;
            Schema schema;
            TypeToken<?> dataType;
            ConsumerConfig consumerConfig;
            int batchSize = 1;
            if (tickAnnotation != null) {
                inputNames = ImmutableSet.of();
                consumerConfig = new ConsumerConfig(0, 0, 1, DequeueStrategy.FIFO, null);
                schema = Schema.of(Schema.Type.NULL);
                dataType = TypeToken.of(void.class);
            } else {
                inputNames = Sets.newHashSet(processInputAnnotation.value());
                if (inputNames.isEmpty()) {
                    // If there is no input name, it would be ANY_INPUT
                    inputNames.add(FlowletDefinition.ANY_INPUT);
                }
                // Resolve the declared parameter type; for batch methods, the Iterator's element type is unwrapped below before schema generation.
                dataType = flowletType.resolveType(method.getGenericParameterTypes()[0]);
                consumerConfig = getConsumerConfig(flowletContext, method);
                Integer processBatchSize = getBatchSize(method, flowletContext);
                if (processBatchSize != null) {
                    if (dataType.getRawType().equals(Iterator.class)) {
                        Preconditions.checkArgument(dataType.getType() instanceof ParameterizedType, "Only ParameterizedType is supported for batch Iterator.");
                        dataType = flowletType.resolveType(((ParameterizedType) dataType.getType()).getActualTypeArguments()[0]);
                    }
                    batchSize = processBatchSize;
                }
                try {
                    schema = schemaGenerator.generate(dataType.getType());
                } catch (UnsupportedTypeException e) {
                    throw Throwables.propagate(e);
                }
            }
            ProcessSpecification processSpec = processSpecFactory.create(inputNames, schema, dataType, processMethod, consumerConfig, batchSize, tickAnnotation);
            // Add processSpec
            if (processSpec != null) {
                result.add(processSpec);
            }
        }
    }
    Preconditions.checkArgument(!result.isEmpty(), "No inputs found for flowlet '%s' of flow '%s' of application '%s' (%s)", flowletContext.getFlowletId(), flowletContext.getFlowId(), flowletContext.getApplicationId(), flowletType);
    return result;
}
Also used: Schema (co.cask.cdap.api.data.schema.Schema), FlowletMethod (co.cask.cdap.internal.specification.FlowletMethod), Method (java.lang.reflect.Method), ParameterizedType (java.lang.reflect.ParameterizedType), ProcessInput (co.cask.cdap.api.annotation.ProcessInput), UnsupportedTypeException (co.cask.cdap.api.data.schema.UnsupportedTypeException), Tick (co.cask.cdap.api.annotation.Tick), ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig)
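
For context, here is a hypothetical flowlet illustrating the three method shapes the walk above recognizes: a plain @ProcessInput method, a @Batch method taking an Iterator (whose element type the code above unwraps before schema generation), and a @Tick method. Class and input names are illustrative; the annotations are the CDAP ones referenced in the example.

import co.cask.cdap.api.annotation.Batch;
import co.cask.cdap.api.annotation.ProcessInput;
import co.cask.cdap.api.annotation.Tick;
import co.cask.cdap.api.flow.flowlet.AbstractFlowlet;
import java.util.Iterator;
import java.util.concurrent.TimeUnit;

public final class WordCountFlowlet extends AbstractFlowlet {

    // One event per invocation from the "lines" input.
    @ProcessInput("lines")
    public void processLine(String line) {
        // ... count words in the line ...
    }

    // Batch mode: the Iterator's element type (String) is what the
    // schema generator sees once the Iterator is unwrapped.
    @Batch(100)
    @ProcessInput("rawLines")
    public void processLines(Iterator<String> lines) {
        // ... count words across the batch ...
    }

    // Tick methods run on a timer and consume no queue input, hence the
    // empty input set and NULL schema in the tick branch of the code above.
    @Tick(delay = 10L, unit = TimeUnit.SECONDS)
    public void flushCounts() {
        // ... periodically flush accumulated counts ...
    }
}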

Example 53 with ConsumerConfig

Use of co.cask.cdap.data2.queue.ConsumerConfig in project cdap by caskdata.

From the class ConsumerSupplier, the method open:

/**
   * Updates the number of instances for the consumer group that this instance belongs to. It closes the
   * existing consumer and creates a new one with the new group size.
   *
   * @param groupSize New group size.
   */
void open(int groupSize) {
    try {
        close();
        ConsumerConfig config = consumerConfig;
        if (groupSize != config.getGroupSize()) {
            // Rebuild the config with the new group size; every other setting carries over.
            config = new ConsumerConfig(consumerConfig.getGroupId(), consumerConfig.getInstanceId(), groupSize, consumerConfig.getDequeueStrategy(), consumerConfig.getHashKey());
        }
        if (queueName.isQueue()) {
            QueueConsumer queueConsumer = dataFabricFacade.createConsumer(queueName, config, numGroups);
            consumerConfig = queueConsumer.getConfig();
            consumer = queueConsumer;
        } else {
            StreamId queueStream = queueName.toStreamId();
            for (EntityId owner : owners) {
                try {
                    runtimeUsageRegistry.register(owner, queueStream);
                } catch (Exception e) {
                    LOG.warn("Failed to register usage of {} -> {}", owner, queueStream, e);
                }
            }
            StreamConsumer streamConsumer = dataFabricFacade.createStreamConsumer(queueName.toStreamId(), config);
            consumerConfig = streamConsumer.getConsumerConfig();
            consumer = streamConsumer;
        }
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
Also used: EntityId (co.cask.cdap.proto.id.EntityId), StreamConsumer (co.cask.cdap.data2.transaction.stream.StreamConsumer), StreamId (co.cask.cdap.proto.id.StreamId), QueueConsumer (co.cask.cdap.data2.queue.QueueConsumer), ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig), IOException (java.io.IOException)
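
The five-argument ConsumerConfig constructor used above (group id, instance id, group size, dequeue strategy, hash key) is the same one the tick branch of Example 52 uses. Here is a minimal sketch of the rescaling step in isolation; resize() is a hypothetical helper for illustration, not CDAP API.

import co.cask.cdap.data2.queue.ConsumerConfig;
import co.cask.cdap.data2.queue.DequeueStrategy;

public final class ConsumerConfigs {

    // Returns a copy of 'config' with the new group size, or 'config' itself if unchanged.
    static ConsumerConfig resize(ConsumerConfig config, int newGroupSize) {
        if (newGroupSize == config.getGroupSize()) {
            return config;
        }
        // Same group id, instance id, strategy, and hash key; only the group size changes.
        return new ConsumerConfig(config.getGroupId(), config.getInstanceId(),
                                  newGroupSize, config.getDequeueStrategy(), config.getHashKey());
    }

    public static void main(String[] args) {
        // FIFO consumer, instance 0 of a group of size 1; the hash key is only
        // meaningful with DequeueStrategy.HASH, so it is null here.
        ConsumerConfig config = new ConsumerConfig(0L, 0, 1, DequeueStrategy.FIFO, null);
        System.out.println(resize(config, 3).getGroupSize());
    }
}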

Aggregations

ConsumerConfig (co.cask.cdap.data2.queue.ConsumerConfig): 33
QueueConsumer (co.cask.cdap.data2.queue.QueueConsumer): 17
TransactionContext (org.apache.tephra.TransactionContext): 14
Test (org.junit.Test): 14
QueueName (co.cask.cdap.common.queue.QueueName): 12
ConsumerGroupConfig (co.cask.cdap.data2.queue.ConsumerGroupConfig): 12
QueueEntry (co.cask.cdap.data2.queue.QueueEntry): 8
Transaction (org.apache.tephra.Transaction): 8
StreamEvent (co.cask.cdap.api.flow.flowlet.StreamEvent): 7
QueueProducer (co.cask.cdap.data2.queue.QueueProducer): 7
QueueEntryRow (co.cask.cdap.data2.transaction.queue.QueueEntryRow): 7
StreamId (co.cask.cdap.proto.id.StreamId): 7
Cell (org.apache.hadoop.hbase.Cell): 7
Filter (org.apache.hadoop.hbase.filter.Filter): 7
FilterList (org.apache.hadoop.hbase.filter.FilterList): 7
IOException (java.io.IOException): 6
TransactionFailureException (org.apache.tephra.TransactionFailureException): 6
TransactionExecutor (org.apache.tephra.TransactionExecutor): 5
DequeueResult (co.cask.cdap.data2.queue.DequeueResult): 4
DequeueStrategy (co.cask.cdap.data2.queue.DequeueStrategy): 3