Search in sources :

Example 26 with StreamSchema

use of com.ibm.streams.operator.StreamSchema in project streamsx.kafka by IBMStreams.

In the class AbstractKafkaConsumerOperator, the method checkParams:

@ContextCheck(compile = false, runtime = true)
public static void checkParams(OperatorContextChecker checker) {
    final StreamSchema schema = checker.getOperatorContext().getStreamingOutputs().get(0).getStreamSchema();
    final Set<String> params = checker.getOperatorContext().getParameterNames();

    // Resolve the message attribute name (parameter value or default) and verify
    // that it exists in the output schema with a supported type.
    final String msgAttrName = params.contains(OUTPUT_MESSAGE_ATTRIBUTE_NAME_PARAM) ? // $NON-NLS-1$
    checker.getOperatorContext().getParameterValues(OUTPUT_MESSAGE_ATTRIBUTE_NAME_PARAM).get(0) : DEFAULT_OUTPUT_MESSAGE_ATTR_NAME;
    final Attribute msgAttr = schema.getAttribute(msgAttrName);
    if (msgAttr == null) {
        // $NON-NLS-1$
        checker.setInvalidContext(Messages.getString("OUTPUT_MESSAGE_ATTRIBUTE_MISSING"), new Object[0]);
    } else {
        checker.checkAttributeType(msgAttr, SUPPORTED_ATTR_TYPES);
    }

    // Key attribute: an explicitly named attribute must exist; otherwise
    // fall back to the default attribute name (which is allowed to be absent).
    Attribute keyAttr;
    if (params.contains(OUTPUT_KEY_ATTRIBUTE_NAME_PARAM)) {
        final String keyAttrName = checker.getOperatorContext().getParameterValues(OUTPUT_KEY_ATTRIBUTE_NAME_PARAM).get(0);
        keyAttr = schema.getAttribute(keyAttrName);
        if (keyAttr == null) {
            // $NON-NLS-1$
            checker.setInvalidContext(Messages.getString("OUTPUT_ATTRIBUTE_NOT_FOUND", keyAttrName), new Object[0]);
        }
    } else {
        keyAttr = schema.getAttribute(DEFAULT_OUTPUT_KEY_ATTR_NAME);
    }
    if (keyAttr != null) {
        checker.checkAttributeType(keyAttr, SUPPORTED_ATTR_TYPES);
    }

    // Output attributes that, when named via a parameter, must exist in the schema.
    checkUserSpecifiedAttributeNameExists(checker, OUTPUT_TOPIC_ATTRIBUTE_NAME_PARAM);
    checkUserSpecifiedAttributeNameExists(checker, OUTPUT_TIMESTAMP_ATTRIBUTE_NAME_PARAM);
    checkUserSpecifiedAttributeNameExists(checker, OUTPUT_OFFSET_ATTRIBUTE_NAME_PARAM);
    checkUserSpecifiedAttributeNameExists(checker, OUTPUT_PARTITION_ATTRIBUTE_NAME_PARAM);

    // Cross-parameter constraints that only apply when 'startPosition' is set.
    if (!params.contains(START_POSITION_PARAM)) {
        return;
    }
    final String startPositionValue = checker.getOperatorContext().getParameterValues(START_POSITION_PARAM).get(0);
    if (startPositionValue.equals(StartPosition.Time.name())) {
        // 'startTime' is mandatory when startPosition == Time
        if (!params.contains(START_TIME_PARAM)) {
            // $NON-NLS-1$
            checker.setInvalidContext(Messages.getString("START_TIME_PARAM_NOT_FOUND"), new Object[0]);
        }
    } else if (startPositionValue.equals(StartPosition.Offset.name())) {
        // 'startOffset' is mandatory when startPosition == 'Offset'
        if (!params.contains(START_OFFSET_PARAM)) {
            // $NON-NLS-1$
            checker.setInvalidContext(Messages.getString("START_OFFSET_PARAM_NOT_FOUND"), new Object[0]);
            return;
        }
        // exactly one start offset must be given per listed partition
        final int numPartitionValues = checker.getOperatorContext().getParameterValues(PARTITION_PARAM).size();
        final int numStartOffsetValues = checker.getOperatorContext().getParameterValues(START_OFFSET_PARAM).size();
        if (numPartitionValues != numStartOffsetValues) {
            // $NON-NLS-1$
            checker.setInvalidContext(Messages.getString("PARTITION_SIZE_NOT_EQUAL_TO_OFFSET_SIZE"), new Object[0]);
            return;
        }
        // 'startOffset' supports at most a single topic
        if (checker.getOperatorContext().getParameterValues(TOPIC_PARAM).size() > 1) {
            // $NON-NLS-1$
            checker.setInvalidContext(Messages.getString("ONLY_ONE_TOPIC_WHEN_USING_STARTOFFSET_PARAM"), new Object[0]);
        }
    }
}
Also used : Attribute(com.ibm.streams.operator.Attribute) JsonObject(com.google.gson.JsonObject) RString(com.ibm.streams.operator.types.RString) StreamSchema(com.ibm.streams.operator.StreamSchema) ContextCheck(com.ibm.streams.operator.OperatorContext.ContextCheck)

Example 27 with StreamSchema

use of com.ibm.streams.operator.StreamSchema in project streamsx.kafka by IBMStreams.

In the class AbstractKafkaConsumerOperator, the method checkUserSpecifiedAttributeNameExists:

/**
 * Checks that, if the given parameter is specified, the output attribute it
 * names exists in the output schema of port 0. Sets an invalid context with
 * the missing attribute's name otherwise.
 *
 * @param checker          the operator context checker
 * @param paramNameToCheck name of the parameter whose value is an output attribute name
 */
private static void checkUserSpecifiedAttributeNameExists(OperatorContextChecker checker, String paramNameToCheck) {
    StreamSchema streamSchema = checker.getOperatorContext().getStreamingOutputs().get(0).getStreamSchema();
    Set<String> paramNames = checker.getOperatorContext().getParameterNames();
    if (paramNames.contains(paramNameToCheck)) {
        String attrName = checker.getOperatorContext().getParameterValues(paramNameToCheck).get(0);
        Attribute attr = streamSchema.getAttribute(attrName);
        if (attr == null) {
            // BUGFIX: the message was previously given 'attr', which is always
            // null in this branch; pass the attribute NAME so the error is useful.
            // $NON-NLS-1$
            checker.setInvalidContext(Messages.getString("OUTPUT_ATTRIBUTE_NOT_FOUND", attrName), new Object[0]);
        }
    }
}
Also used : Attribute(com.ibm.streams.operator.Attribute) RString(com.ibm.streams.operator.types.RString) StreamSchema(com.ibm.streams.operator.StreamSchema)

Example 28 with StreamSchema

use of com.ibm.streams.operator.StreamSchema in project streamsx.kafka by IBMStreams.

In the class AbstractKafkaConsumerOperator, the method initialize:

/**
 * Initializes the operator: inspects the output schema for the optional
 * output attributes (key, topic, timestamp, partition, offset), builds the
 * Kafka consumer client, subscribes to topics (when no input port is used),
 * and prepares the tuple-producing thread. The thread is created but not
 * started here.
 *
 * @param context the operator context
 * @throws Exception if the consumer client failed to initialize
 */
@Override
public synchronized void initialize(OperatorContext context) throws Exception {
    // Must call super.initialize(context) to correctly setup an operator.
    super.initialize(context);
    logger.trace(// $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    "Operator " + context.getName() + " initializing in PE: " + context.getPE().getPEId() + " in Job: " + context.getPE().getJobId());
    shutdown = new AtomicBoolean(false);
    gson = new Gson();
    // Determine which of the optional output attributes are present in the schema.
    StreamSchema outputSchema = context.getStreamingOutputs().get(0).getStreamSchema();
    hasOutputKey = outputSchema.getAttribute(outputKeyAttrName) != null;
    hasOutputTopic = outputSchema.getAttribute(outputTopicAttrName) != null;
    hasOutputTimetamp = outputSchema.getAttribute(outputMessageTimestampAttrName) != null;
    hasOutputPartition = outputSchema.getAttribute(outputPartitionAttrName) != null;
    hasOutputOffset = outputSchema.getAttribute(outputOffsetAttrName) != null;
    Class<?> keyClass = hasOutputKey ? getAttributeType(context.getStreamingOutputs().get(0), outputKeyAttrName) : // default to String.class for key type
    String.class;
    Class<?> valueClass = getAttributeType(context.getStreamingOutputs().get(0), outputMessageAttrName);
    KafkaOperatorProperties kafkaProperties = getKafkaProperties();
    // $NON-NLS-1$
    logger.debug("kafkaProperties: " + kafkaProperties);
    // set the group ID property if the groupId parameter is specified
    if (groupId != null && !groupId.isEmpty()) {
        kafkaProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    }
    consumer = new KafkaConsumerClient.KafkaConsumerClientBuilder().setKafkaProperties(kafkaProperties).setKeyClass(keyClass).setValueClass(valueClass).setOperatorContext(context).build();
    // If an exception occurred during init, throw it!
    if (consumer.getInitializationException() != null) {
        Exception e = consumer.getInitializationException();
        // BUGFIX: removed e.printStackTrace(); the exception is already logged
        // with its stack trace below and rethrown — printing to stderr duplicated it.
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    }
    // input port not used, so topics must be defined via parameter
    if (context.getStreamingInputs().size() == 0) {
        if (topics != null) {
            registerForDataGovernance(context, topics);
            // Choose the subscription variant matching the configured start position.
            if (startPosition == StartPosition.Time) {
                consumer.subscribeToTopicsWithTimestamp(topics, partitions, startTime);
            } else if (startPosition == StartPosition.Offset) {
                consumer.subscribeToTopicsWithOffsets(topics, partitions, startOffsets);
            } else {
                consumer.subscribeToTopics(topics, partitions, startPosition);
            }
        }
    }
    crContext = context.getOptionalContext(ConsistentRegionContext.class);
    // After a relaunch in a consistent region, wait for the reset before producing tuples.
    if (crContext != null && context.getPE().getRelaunchCount() > 0) {
        resettingLatch = new CountDownLatch(1);
    }
    processThread = getOperatorContext().getThreadFactory().newThread(new Runnable() {

        @Override
        public void run() {
            try {
                produceTuples();
            } catch (Exception e) {
                // $NON-NLS-1$
                Logger.getLogger(this.getClass()).error("Operator error", e);
                // Otherwise this thread terminates leaving the PE in a healthy state without being healthy.
                throw new RuntimeException(e);
            }
        }
    });
    // Non-daemon so the PE stays alive while tuples are being produced.
    processThread.setDaemon(false);
}
Also used : ConsistentRegionContext(com.ibm.streams.operator.state.ConsistentRegionContext) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Gson(com.google.gson.Gson) KafkaOperatorProperties(com.ibm.streamsx.kafka.properties.KafkaOperatorProperties) StreamSchema(com.ibm.streams.operator.StreamSchema) CountDownLatch(java.util.concurrent.CountDownLatch)

Example 29 with StreamSchema

use of com.ibm.streams.operator.StreamSchema in project streamsx.kafka by IBMStreams.

In the class AbstractKafkaProducerOperator, the method checkAttributes:

@ContextCheck(runtime = true, compile = false)
public static void checkAttributes(OperatorContextChecker checker) {
    StreamSchema schema = checker.getOperatorContext().getStreamingInputs().get(0).getStreamSchema();

    /*
     * The message attribute must either be defined via the 'messageAttr'
     * parameter, or the input schema must contain an attribute named
     * "message". Otherwise, a context error is returned.
     */
    List<String> messageAttrParamValues = checker.getOperatorContext().getParameterValues(MESSAGEATTR_PARAM_NAME);
    Attribute msgAttr = (messageAttrParamValues == null || messageAttrParamValues.isEmpty())
            ? schema.getAttribute(DEFAULT_MESSAGE_ATTR_NAME)
            : schema.getAttribute(parseFQAttributeName(messageAttrParamValues.get(0)));
    if (msgAttr == null) {
        // no message attribute could be resolved — invalid context
        // $NON-NLS-1$
        checker.setInvalidContext(Messages.getString("MESSAGE_ATTRIBUTE_NOT_FOUND"), new Object[0]);
    } else {
        // validate the message attribute type
        checker.checkAttributeType(msgAttr, SUPPORTED_ATTR_TYPES);
    }

    /*
     * A key attribute can either be specified via the 'keyAttr' parameter,
     * or the input schema can contain an attribute named "key". If neither
     * is true, a 'null' key is used when writing records to Kafka, so no
     * invalid context is set here.
     */
    List<String> keyParamValues = checker.getOperatorContext().getParameterValues(KEYATTR_PARAM_NAME);
    Attribute keyAttr = (keyParamValues == null || keyParamValues.isEmpty())
            ? schema.getAttribute(DEFAULT_KEY_ATTR_NAME)
            : schema.getAttribute(parseFQAttributeName(keyParamValues.get(0)));
    if (keyAttr != null) {
        // validate the key attribute type
        checker.checkAttributeType(keyAttr, SUPPORTED_ATTR_TYPES);
    }

    /*
     * For topics, one of the following must be true:
     *  * the 'topic' parameter lists topics to write to
     *  * the 'topicAttr' parameter points to an input attribute containing the topic
     *  * neither parameter is specified but the input schema has an attribute named "topic"
     * An invalid context is set if none of these holds.
     */
    boolean topicSpecified = checker.getOperatorContext().getParameterNames().contains(TOPIC_PARAM_NAME)
            || checker.getOperatorContext().getParameterNames().contains(TOPICATTR_PARAM_NAME)
            || schema.getAttribute(DEFAULT_TOPIC_ATTR_NAME) != null;
    if (!topicSpecified) {
        // $NON-NLS-1$
        checker.setInvalidContext(Messages.getString("TOPIC_NOT_SPECIFIED"), new Object[0]);
    }
}
Also used : Attribute(com.ibm.streams.operator.Attribute) DefaultAttribute(com.ibm.streams.operator.model.DefaultAttribute) TupleAttribute(com.ibm.streams.operator.TupleAttribute) StreamSchema(com.ibm.streams.operator.StreamSchema) ContextCheck(com.ibm.streams.operator.OperatorContext.ContextCheck)

Example 30 with StreamSchema

use of com.ibm.streams.operator.StreamSchema in project streamsx.kafka by IBMStreams.

In the class AbstractKafkaProducerOperator, the method checkPartitionAttributeType:

/*
     * If the `partitionAttribute` is not defined, then the operator will look
     * for an input attribute called "partition". Here, we need to check that this
     * input attribute is of type "int32". 
     */
@ContextCheck(compile = true)
public static void checkPartitionAttributeType(OperatorContextChecker checker) {
    // Only relevant when the user did not name a partition attribute explicitly;
    // in that case an input attribute called "partition", if present, must be int32.
    if (checker.getOperatorContext().getParameterNames().contains(PARTITIONATTR_PARAM_NAME)) {
        return;
    }
    StreamSchema inputSchema = checker.getOperatorContext().getStreamingInputs().get(0).getStreamSchema();
    // $NON-NLS-1$
    Attribute partitionAttr = inputSchema.getAttribute("partition");
    if (partitionAttr != null && !checker.checkAttributeType(partitionAttr, MetaType.INT32)) {
        // $NON-NLS-1$
        checker.setInvalidContext(Messages.getString("PARTITION_ATTRIBUTE_NOT_INT32"), new Object[0]);
    }
}
Also used : Attribute(com.ibm.streams.operator.Attribute) DefaultAttribute(com.ibm.streams.operator.model.DefaultAttribute) TupleAttribute(com.ibm.streams.operator.TupleAttribute) StreamSchema(com.ibm.streams.operator.StreamSchema) ContextCheck(com.ibm.streams.operator.OperatorContext.ContextCheck)

Aggregations

StreamSchema (com.ibm.streams.operator.StreamSchema)32 Test (org.junit.Test)16 SPLStream (com.ibm.streamsx.topology.spl.SPLStream)15 Topology (com.ibm.streamsx.topology.Topology)14 OutputTuple (com.ibm.streams.operator.OutputTuple)12 Tester (com.ibm.streamsx.topology.tester.Tester)12 List (java.util.List)11 HashMap (java.util.HashMap)8 TStream (com.ibm.streamsx.topology.TStream)7 TestTopology (com.ibm.streamsx.topology.test.TestTopology)7 Map (java.util.Map)7 Attribute (com.ibm.streams.operator.Attribute)6 Constants (com.ibm.streamsx.kafka.test.utils.Constants)6 Delay (com.ibm.streamsx.kafka.test.utils.Delay)6 KafkaSPLStreamsUtils (com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils)6 StreamsContext (com.ibm.streamsx.topology.context.StreamsContext)6 Type (com.ibm.streamsx.topology.context.StreamsContext.Type)6 StreamsContextFactory (com.ibm.streamsx.topology.context.StreamsContextFactory)6 BiFunction (com.ibm.streamsx.topology.function.BiFunction)6 SPL (com.ibm.streamsx.topology.spl.SPL)6