
Example 36 with Collections

use of java.util.Collections in project beam by apache.

the class KafkaIOTest method mkMockConsumer.

// Update the mock consumer with records distributed among the given topics, each with the given
// number of partitions. Records are assigned in round-robin order among the partitions.
private static MockConsumer<byte[], byte[]> mkMockConsumer(List<String> topics, int partitionsPerTopic, int numElements, OffsetResetStrategy offsetResetStrategy, Map<String, Object> config, SerializableFunction<Integer, byte[]> keyFunction, SerializableFunction<Integer, byte[]> valueFunction) {
    final List<TopicPartition> partitions = new ArrayList<>();
    final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> records = new HashMap<>();
    Map<String, List<PartitionInfo>> partitionMap = new HashMap<>();
    for (String topic : topics) {
        List<PartitionInfo> partIds = new ArrayList<>(partitionsPerTopic);
        for (int i = 0; i < partitionsPerTopic; i++) {
            TopicPartition tp = new TopicPartition(topic, i);
            partitions.add(tp);
            partIds.add(new PartitionInfo(topic, i, null, null, null));
            records.put(tp, new ArrayList<>());
        }
        partitionMap.put(topic, partIds);
    }
    int numPartitions = partitions.size();
    final long[] offsets = new long[numPartitions];
    long timestampStartMillis = (Long) config.getOrDefault(TIMESTAMP_START_MILLIS_CONFIG, LOG_APPEND_START_TIME.getMillis());
    TimestampType timestampType = TimestampType.forName((String) config.getOrDefault(TIMESTAMP_TYPE_CONFIG, TimestampType.LOG_APPEND_TIME.toString()));
    for (int i = 0; i < numElements; i++) {
        int pIdx = i % numPartitions;
        TopicPartition tp = partitions.get(pIdx);
        byte[] key = keyFunction.apply(i);
        byte[] value = valueFunction.apply(i);
        records.get(tp).add(new ConsumerRecord<>(tp.topic(), tp.partition(), offsets[pIdx]++, timestampStartMillis + Duration.standardSeconds(i).getMillis(), timestampType, 0, key.length, value.length, key, value));
    }
    // This is updated when the reader assigns partitions.
    final AtomicReference<List<TopicPartition>> assignedPartitions = new AtomicReference<>(Collections.<TopicPartition>emptyList());
    final MockConsumer<byte[], byte[]> consumer = new MockConsumer<byte[], byte[]>(offsetResetStrategy) {

        @Override
        public synchronized void assign(final Collection<TopicPartition> assigned) {
            super.assign(assigned);
            assignedPartitions.set(ImmutableList.copyOf(assigned));
            for (TopicPartition tp : assigned) {
                updateBeginningOffsets(ImmutableMap.of(tp, 0L));
                updateEndOffsets(ImmutableMap.of(tp, (long) records.get(tp).size()));
            }
        }

        // Override offsetsForTimes() in order to look up the offsets by timestamp.
        @Override
        public synchronized Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes(Map<TopicPartition, Long> timestampsToSearch) {
            return timestampsToSearch.entrySet().stream().map(e -> {
                // In test scope, timestamp == offset.
                long maxOffset = offsets[partitions.indexOf(e.getKey())];
                long offset = e.getValue();
                OffsetAndTimestamp value = (offset >= maxOffset) ? null : new OffsetAndTimestamp(offset, offset);
                return new SimpleEntry<>(e.getKey(), value);
            }).collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue));
        }
    };
    for (String topic : topics) {
        consumer.updatePartitions(topic, partitionMap.get(topic));
    }
    // MockConsumer does not maintain any relationship between the partition seek position and the
    // records added. E.g. if we add 10 records to a partition and then seek to the end of the
    // partition, MockConsumer will still return those 10 records in the next poll. It is
    // our responsibility to keep the currently enqueued records in sync with the partition offsets.
    // The following task is invoked on each call to MockConsumer.poll().
    // We enqueue only the records with an offset >= the partition's current position.
    Runnable recordEnqueueTask = new Runnable() {

        @Override
        public void run() {
            // add all the records with offset >= current partition position.
            int recordsAdded = 0;
            for (TopicPartition tp : assignedPartitions.get()) {
                long curPos = consumer.position(tp);
                for (ConsumerRecord<byte[], byte[]> r : records.get(tp)) {
                    if (r.offset() >= curPos) {
                        consumer.addRecord(r);
                        recordsAdded++;
                    }
                }
            }
            if (recordsAdded == 0) {
                if (config.get("inject.error.at.eof") != null) {
                    consumer.setException(new KafkaException("Injected error in consumer.poll()"));
                }
                // MockConsumer.poll(timeout) does not actually wait even when there aren't any
                // records. Add a small wait here in order to avoid busy looping in the reader.
                // TODO: BEAM-4086: testUnboundedSourceWithoutBoundedWrapper() occasionally hangs
                // without this wait. Need to look into it.
                Uninterruptibles.sleepUninterruptibly(10, TimeUnit.MILLISECONDS);
            }
            consumer.schedulePollTask(this);
        }
    };
    consumer.schedulePollTask(recordEnqueueTask);
    return consumer;
}
Also used : Count(org.apache.beam.sdk.transforms.Count) MetricName(org.apache.beam.sdk.metrics.MetricName) Arrays(java.util.Arrays) PipelineExecutionException(org.apache.beam.sdk.Pipeline.PipelineExecutionException) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) CoderUtils(org.apache.beam.sdk.util.CoderUtils) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) UnboundedSource(org.apache.beam.sdk.io.UnboundedSource) KafkaException(org.apache.kafka.common.KafkaException) ImmutableMap(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap) OffsetResetStrategy(org.apache.kafka.clients.consumer.OffsetResetStrategy) Map(java.util.Map) Window(org.apache.beam.sdk.transforms.windowing.Window) Uninterruptibles(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Uninterruptibles) TimestampType(org.apache.kafka.common.record.TimestampType) MockSchemaRegistry(io.confluent.kafka.schemaregistry.testutil.MockSchemaRegistry) MetricResult(org.apache.beam.sdk.metrics.MetricResult) UnboundedReader(org.apache.beam.sdk.io.UnboundedSource.UnboundedReader) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) PartitionInfo(org.apache.kafka.common.PartitionInfo) OffsetAndTimestamp(org.apache.kafka.clients.consumer.OffsetAndTimestamp) StandardCharsets(java.nio.charset.StandardCharsets) Executors(java.util.concurrent.Executors) Matchers.instanceOf(org.hamcrest.Matchers.instanceOf) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) MetricQueryResults(org.apache.beam.sdk.metrics.MetricQueryResults) Matchers.greaterThan(org.hamcrest.Matchers.greaterThan) DisplayDataMatchers.hasDisplayItem(org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem) Matchers.containsString(org.hamcrest.Matchers.containsString) KV(org.apache.beam.sdk.values.KV) Duration(org.joda.time.Duration) RunWith(org.junit.runner.RunWith) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) ArrayList(java.util.ArrayList) SinkMetrics(org.apache.beam.sdk.metrics.SinkMetrics) Read(org.apache.beam.sdk.io.Read) Distinct(org.apache.beam.sdk.transforms.Distinct) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) TestPipeline(org.apache.beam.sdk.testing.TestPipeline) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Deserializer(org.apache.kafka.common.serialization.Deserializer) DoFn(org.apache.beam.sdk.transforms.DoFn) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) ThrowableMessageMatcher.hasMessage(org.junit.internal.matchers.ThrowableMessageMatcher.hasMessage) PAssert(org.apache.beam.sdk.testing.PAssert) Producer(org.apache.kafka.clients.producer.Producer) Assert.assertTrue(org.junit.Assert.assertTrue) IOException(java.io.IOException) Test(org.junit.Test) AbstractKafkaAvroSerDeConfig(io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig) MetricResultsMatchers.attemptedMetricsResult(org.apache.beam.sdk.metrics.MetricResultsMatchers.attemptedMetricsResult) Matchers.hasItem(org.hamcrest.Matchers.hasItem) Assert.assertNull(org.junit.Assert.assertNull) IsIterableWithSize(org.hamcrest.collection.IsIterableWithSize) Serializer(org.apache.kafka.common.serialization.Serializer) BoundedWindow(org.apache.beam.sdk.transforms.windowing.BoundedWindow) VarIntCoder(org.apache.beam.sdk.coders.VarIntCoder) Assert.assertEquals(org.junit.Assert.assertEquals) 
MockProducer(org.apache.kafka.clients.producer.MockProducer) SourceMetrics(org.apache.beam.sdk.metrics.SourceMetrics) Matchers.isA(org.hamcrest.Matchers.isA) MockConsumer(org.apache.kafka.clients.consumer.MockConsumer) PipelineResult(org.apache.beam.sdk.PipelineResult) LoggerFactory(org.slf4j.LoggerFactory) SerializableFunction(org.apache.beam.sdk.transforms.SerializableFunction) ByteBuffer(java.nio.ByteBuffer) PCollectionList(org.apache.beam.sdk.values.PCollectionList) ThrowableCauseMatcher.hasCause(org.junit.internal.matchers.ThrowableCauseMatcher.hasCause) Method(java.lang.reflect.Method) Flatten(org.apache.beam.sdk.transforms.Flatten) MapElements(org.apache.beam.sdk.transforms.MapElements) Min(org.apache.beam.sdk.transforms.Min) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) Collection(java.util.Collection) BigEndianIntegerCoder(org.apache.beam.sdk.coders.BigEndianIntegerCoder) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) LongDeserializer(org.apache.kafka.common.serialization.LongDeserializer) VarLongCoder(org.apache.beam.sdk.coders.VarLongCoder) LongSerializer(org.apache.kafka.common.serialization.LongSerializer) Collectors(java.util.stream.Collectors) List(java.util.List) Max(org.apache.beam.sdk.transforms.Max) ParDo(org.apache.beam.sdk.transforms.ParDo) Header(org.apache.kafka.common.header.Header) TypeDescriptors(org.apache.beam.sdk.values.TypeDescriptors) Optional(java.util.Optional) ImmutableList(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList) Assume.assumeTrue(org.junit.Assume.assumeTrue) Values(org.apache.beam.sdk.transforms.Values) MetricNameFilter(org.apache.beam.sdk.metrics.MetricNameFilter) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Headers(org.apache.kafka.common.header.Headers) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) AvroGeneratedUser(org.apache.beam.sdk.io.AvroGeneratedUser) PipelineOptionsFactory(org.apache.beam.sdk.options.PipelineOptionsFactory) AtomicReference(java.util.concurrent.atomic.AtomicReference) ConcurrentMap(java.util.concurrent.ConcurrentMap) SimpleEntry(java.util.AbstractMap.SimpleEntry) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) ExpectedException(org.junit.rules.ExpectedException) ExecutorService(java.util.concurrent.ExecutorService) Nullable(org.checkerframework.checker.nullness.qual.Nullable) Utils(org.apache.kafka.common.utils.Utils) DisplayData(org.apache.beam.sdk.transforms.display.DisplayData) GenericRecord(org.apache.avro.generic.GenericRecord) Logger(org.slf4j.Logger) BigEndianLongCoder(org.apache.beam.sdk.coders.BigEndianLongCoder) Assert.assertNotNull(org.junit.Assert.assertNotNull) FixedWindows(org.apache.beam.sdk.transforms.windowing.FixedWindows) Lists(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists) JUnit4(org.junit.runners.JUnit4) PCollection(org.apache.beam.sdk.values.PCollection) MetricsFilter(org.apache.beam.sdk.metrics.MetricsFilter) TimeUnit(java.util.concurrent.TimeUnit) Rule(org.junit.Rule) Ignore(org.junit.Ignore) ConfluentSchemaRegistryDeserializerProviderTest.mockDeserializerProvider(org.apache.beam.sdk.io.kafka.ConfluentSchemaRegistryDeserializerProviderTest.mockDeserializerProvider) Instant(org.joda.time.Instant) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) Comparator(java.util.Comparator) Collections(java.util.Collections) 
IsIterableContainingInAnyOrder(org.hamcrest.collection.IsIterableContainingInAnyOrder) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Matchers.containsString(org.hamcrest.Matchers.containsString) TimestampType(org.apache.kafka.common.record.TimestampType) ArrayList(java.util.ArrayList) PCollectionList(org.apache.beam.sdk.values.PCollectionList) List(java.util.List) ImmutableList(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList) PartitionInfo(org.apache.kafka.common.PartitionInfo) MockConsumer(org.apache.kafka.clients.consumer.MockConsumer) OffsetAndTimestamp(org.apache.kafka.clients.consumer.OffsetAndTimestamp) AtomicReference(java.util.concurrent.atomic.AtomicReference) TopicPartition(org.apache.kafka.common.TopicPartition) Collection(java.util.Collection) PCollection(org.apache.beam.sdk.values.PCollection) KafkaException(org.apache.kafka.common.KafkaException) ImmutableMap(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ConcurrentMap(java.util.concurrent.ConcurrentMap)
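
The java.util.Collections call in this example is Collections.<TopicPartition>emptyList(), which seeds the assignedPartitions reference with an immutable, non-null default until the reader assigns partitions. A minimal standalone sketch of the same idiom (the Worker class and its methods are invented for illustration, not part of the Beam test):

import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;

// Sketch: start with an immutable empty list so callers never see null, then atomically
// swap in an immutable snapshot once data arrives. List.copyOf (Java 10+) stands in for
// the ImmutableList.copyOf used in the test above.
class Worker {
    private final AtomicReference<List<String>> assigned =
            new AtomicReference<>(Collections.emptyList());

    void assign(List<String> items) {
        assigned.set(List.copyOf(items));
    }

    List<String> current() {
        return assigned.get(); // never null, possibly empty
    }
}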

Example 37 with Collections

use of java.util.Collections in project groovy by apache.

the class StaticTypeCheckingVisitor method visitBinaryExpression.

@Override
public void visitBinaryExpression(final BinaryExpression expression) {
    BinaryExpression enclosingBinaryExpression = typeCheckingContext.getEnclosingBinaryExpression();
    typeCheckingContext.pushEnclosingBinaryExpression(expression);
    try {
        int op = expression.getOperation().getType();
        Expression leftExpression = expression.getLeftExpression();
        Expression rightExpression = expression.getRightExpression();
        leftExpression.visit(this);
        SetterInfo setterInfo = removeSetterInfo(leftExpression);
        ClassNode lType = null;
        if (setterInfo != null) {
            if (ensureValidSetter(expression, leftExpression, rightExpression, setterInfo)) {
                return;
            }
            lType = getType(leftExpression);
        } else {
            if (op != EQUAL && op != ELVIS_EQUAL) {
                lType = getType(leftExpression);
            } else {
                lType = getOriginalDeclarationType(leftExpression);
                if (isFunctionalInterface(lType)) {
                    processFunctionalInterfaceAssignment(lType, rightExpression);
                } else if (isClosureWithType(lType) && rightExpression instanceof ClosureExpression) {
                    storeInferredReturnType(rightExpression, getCombinedBoundType(lType.getGenericsTypes()[0]));
                }
            }
            rightExpression.visit(this);
        }
        ClassNode rType = isNullConstant(rightExpression) && !isPrimitiveType(lType)
                ? UNKNOWN_PARAMETER_TYPE // null to primitive type is handled elsewhere
                : getInferredTypeFromTempInfo(rightExpression, getType(rightExpression));
        ClassNode resultType;
        if (op == KEYWORD_IN || op == COMPARE_NOT_IN) {
            // for the "in" or "!in" operator, the receiver and the arguments are reversed
            BinaryExpression reverseExpression = binX(rightExpression, expression.getOperation(), leftExpression);
            resultType = getResultType(rType, op, lType, reverseExpression);
            // GROOVY-10239
            if (resultType == null)
                resultType = boolean_TYPE;
            storeTargetMethod(expression, reverseExpression.getNodeMetaData(DIRECT_METHOD_CALL_TARGET));
        } else {
            resultType = getResultType(lType, op, rType, expression);
            if (op == ELVIS_EQUAL) {
                // TODO: Should this transform and visit be done before left and right are visited above?
                Expression fullExpression = new ElvisOperatorExpression(leftExpression, rightExpression);
                fullExpression.setSourcePosition(expression);
                fullExpression.visit(this);
                resultType = getType(fullExpression);
            }
        }
        if (resultType == null) {
            resultType = lType;
        }
        if (isArrayOp(op)) {
            if (leftExpression instanceof VariableExpression) {
                // GROOVY-6782
                if (leftExpression.getNodeMetaData(INFERRED_TYPE) == null) {
                    leftExpression.removeNodeMetaData(INFERRED_RETURN_TYPE);
                    storeType(leftExpression, lType);
                }
            }
            if (!lType.isArray() && enclosingBinaryExpression != null && enclosingBinaryExpression.getLeftExpression() == expression && isAssignment(enclosingBinaryExpression.getOperation().getType())) {
                // left hand side of a subscript assignment: map['foo'] = ...
                Expression enclosingExpressionRHS = enclosingBinaryExpression.getRightExpression();
                if (!(enclosingExpressionRHS instanceof ClosureExpression)) {
                    enclosingExpressionRHS.visit(this);
                }
                ClassNode[] arguments = { rType, getType(enclosingExpressionRHS) };
                List<MethodNode> nodes = findMethod(lType.redirect(), "putAt", arguments);
                if (nodes.size() == 1) {
                    typeCheckMethodsWithGenericsOrFail(lType, arguments, nodes.get(0), enclosingExpressionRHS);
                } else if (nodes.isEmpty()) {
                    addNoMatchingMethodError(lType, "putAt", arguments, enclosingBinaryExpression);
                }
            }
        }
        boolean isEmptyDeclaration = (expression instanceof DeclarationExpression && (rightExpression instanceof EmptyExpression || rType == UNKNOWN_PARAMETER_TYPE));
        if (!isEmptyDeclaration && isAssignment(op)) {
            if (rightExpression instanceof ConstructorCallExpression)
                inferDiamondType((ConstructorCallExpression) rightExpression, lType);
            if (lType.isUsingGenerics() && missesGenericsTypes(resultType)) {
                // "completed" with generics type information available from the LHS
                if (lType.equals(resultType)) {
                    if (!lType.isGenericsPlaceHolder())
                        resultType = lType;
                } else if (!resultType.isGenericsPlaceHolder()) {
                    // GROOVY-10324
                    Map<GenericsTypeName, GenericsType> gt = new HashMap<>();
                    extractGenericsConnections(gt, resultType, resultType.redirect());
                    extractGenericsConnections(gt, lType, getNextSuperClass(resultType, lType));
                    // GROOVY-10235, et al.
                    resultType = applyGenericsContext(gt, resultType.redirect());
                }
            }
            ClassNode originType = getOriginalDeclarationType(leftExpression);
            typeCheckAssignment(expression, leftExpression, originType, rightExpression, resultType);
            // check for implicit conversion like "String a = 123", "int[] b = [1,2,3]", "List c = [].stream()", etc.
            if (!implementsInterfaceOrIsSubclassOf(wrapTypeIfNecessary(resultType), wrapTypeIfNecessary(originType))) {
                resultType = originType;
            } else if (isPrimitiveType(originType) && resultType.equals(getWrapper(originType))) {
                // retain primitive semantics
                resultType = originType;
            } else {
                // GROOVY-7549: RHS type may not be accessible to enclosing class
                int modifiers = resultType.getModifiers();
                ClassNode enclosingType = typeCheckingContext.getEnclosingClassNode();
                if (!Modifier.isPublic(modifiers) && !enclosingType.equals(resultType) && !getOutermost(enclosingType).equals(getOutermost(resultType)) && (Modifier.isPrivate(modifiers) || !Objects.equals(enclosingType.getPackageName(), resultType.getPackageName()))) {
                    // TODO: Find accessible type in hierarchy of resultType?
                    resultType = originType;
                } else if (GenericsUtils.hasUnresolvedGenerics(resultType)) {
                    // GROOVY-9033, GROOVY-10089, et al.
                    Map<GenericsTypeName, GenericsType> enclosing = extractGenericsParameterMapOfThis(typeCheckingContext);
                    resultType = fullyResolveType(resultType, Optional.ofNullable(enclosing).orElseGet(Collections::emptyMap));
                }
            }
            // track conditional assignment
            if (leftExpression instanceof VariableExpression && typeCheckingContext.ifElseForWhileAssignmentTracker != null) {
                Variable accessedVariable = ((VariableExpression) leftExpression).getAccessedVariable();
                if (accessedVariable instanceof Parameter) {
                    accessedVariable = new ParameterVariableExpression((Parameter) accessedVariable);
                }
                if (accessedVariable instanceof VariableExpression) {
                    recordAssignment((VariableExpression) accessedVariable, resultType);
                }
            }
            storeType(leftExpression, resultType);
            // propagate closure parameter type information
            if (leftExpression instanceof VariableExpression) {
                if (rightExpression instanceof ClosureExpression) {
                    leftExpression.putNodeMetaData(CLOSURE_ARGUMENTS, ((ClosureExpression) rightExpression).getParameters());
                } else if (rightExpression instanceof VariableExpression && ((VariableExpression) rightExpression).getAccessedVariable() instanceof Expression && ((Expression) ((VariableExpression) rightExpression).getAccessedVariable()).getNodeMetaData(CLOSURE_ARGUMENTS) != null) {
                    Variable targetVariable = findTargetVariable((VariableExpression) leftExpression);
                    if (targetVariable instanceof ASTNode) {
                        ((ASTNode) targetVariable).putNodeMetaData(CLOSURE_ARGUMENTS, ((Expression) ((VariableExpression) rightExpression).getAccessedVariable()).getNodeMetaData(CLOSURE_ARGUMENTS));
                    }
                }
            }
        } else if (op == KEYWORD_INSTANCEOF /*|| op == COMPARE_NOT_INSTANCEOF*/) {
            pushInstanceOfTypeInfo(leftExpression, rightExpression);
        }
        if (!isEmptyDeclaration) {
            storeType(expression, resultType);
        }
        validateResourceInARM(expression, resultType);
        // GROOVY-5874: if left expression is a closure shared variable, a second pass should be done
        if (leftExpression instanceof VariableExpression && ((VariableExpression) leftExpression).isClosureSharedVariable()) {
            typeCheckingContext.secondPassExpressions.add(new SecondPassExpression<>(expression));
        }
    } finally {
        typeCheckingContext.popEnclosingBinaryExpression();
    }
}
Also used : DynamicVariable(org.codehaus.groovy.ast.DynamicVariable) Variable(org.codehaus.groovy.ast.Variable) StaticTypeCheckingSupport.findTargetVariable(org.codehaus.groovy.transform.stc.StaticTypeCheckingSupport.findTargetVariable) BinaryExpression(org.codehaus.groovy.ast.expr.BinaryExpression) MethodNode(org.codehaus.groovy.ast.MethodNode) ElvisOperatorExpression(org.codehaus.groovy.ast.expr.ElvisOperatorExpression) GenericsType(org.codehaus.groovy.ast.GenericsType) StaticTypeCheckingSupport.getCombinedGenericsType(org.codehaus.groovy.transform.stc.StaticTypeCheckingSupport.getCombinedGenericsType) ASTNode(org.codehaus.groovy.ast.ASTNode) ClosureExpression(org.codehaus.groovy.ast.expr.ClosureExpression) Collections(java.util.Collections) StaticTypeCheckingSupport.findDGMMethodsForClassNode(org.codehaus.groovy.transform.stc.StaticTypeCheckingSupport.findDGMMethodsForClassNode) ClassNode(org.codehaus.groovy.ast.ClassNode) InnerClassNode(org.codehaus.groovy.ast.InnerClassNode) ConstructorCallExpression(org.codehaus.groovy.ast.expr.ConstructorCallExpression) DeclarationExpression(org.codehaus.groovy.ast.expr.DeclarationExpression) VariableExpression(org.codehaus.groovy.ast.expr.VariableExpression) ClosureSignatureHint(groovy.transform.stc.ClosureSignatureHint) EmptyExpression(org.codehaus.groovy.ast.expr.EmptyExpression) UnaryMinusExpression(org.codehaus.groovy.ast.expr.UnaryMinusExpression) ClosureListExpression(org.codehaus.groovy.ast.expr.ClosureListExpression) ArgumentListExpression(org.codehaus.groovy.ast.expr.ArgumentListExpression) MethodReferenceExpression(org.codehaus.groovy.ast.expr.MethodReferenceExpression) TernaryExpression(org.codehaus.groovy.ast.expr.TernaryExpression) PropertyExpression(org.codehaus.groovy.ast.expr.PropertyExpression) PrefixExpression(org.codehaus.groovy.ast.expr.PrefixExpression) PostfixExpression(org.codehaus.groovy.ast.expr.PostfixExpression) Expression(org.codehaus.groovy.ast.expr.Expression) UnaryPlusExpression(org.codehaus.groovy.ast.expr.UnaryPlusExpression) AnnotationConstantExpression(org.codehaus.groovy.ast.expr.AnnotationConstantExpression) BitwiseNegationExpression(org.codehaus.groovy.ast.expr.BitwiseNegationExpression) MapExpression(org.codehaus.groovy.ast.expr.MapExpression) ConstantExpression(org.codehaus.groovy.ast.expr.ConstantExpression) StaticTypeCheckingSupport.evaluateExpression(org.codehaus.groovy.transform.stc.StaticTypeCheckingSupport.evaluateExpression) NotExpression(org.codehaus.groovy.ast.expr.NotExpression) FieldExpression(org.codehaus.groovy.ast.expr.FieldExpression) EmptyExpression(org.codehaus.groovy.ast.expr.EmptyExpression) ConstructorCallExpression(org.codehaus.groovy.ast.expr.ConstructorCallExpression) ClassExpression(org.codehaus.groovy.ast.expr.ClassExpression) TupleExpression(org.codehaus.groovy.ast.expr.TupleExpression) MethodPointerExpression(org.codehaus.groovy.ast.expr.MethodPointerExpression) MapEntryExpression(org.codehaus.groovy.ast.expr.MapEntryExpression) VariableExpression(org.codehaus.groovy.ast.expr.VariableExpression) CastExpression(org.codehaus.groovy.ast.expr.CastExpression) StaticMethodCallExpression(org.codehaus.groovy.ast.expr.StaticMethodCallExpression) LambdaExpression(org.codehaus.groovy.ast.expr.LambdaExpression) ListExpression(org.codehaus.groovy.ast.expr.ListExpression) RangeExpression(org.codehaus.groovy.ast.expr.RangeExpression) SpreadExpression(org.codehaus.groovy.ast.expr.SpreadExpression) ArrayExpression(org.codehaus.groovy.ast.expr.ArrayExpression) 
BinaryExpression(org.codehaus.groovy.ast.expr.BinaryExpression) ElvisOperatorExpression(org.codehaus.groovy.ast.expr.ElvisOperatorExpression) MethodCallExpression(org.codehaus.groovy.ast.expr.MethodCallExpression) DeclarationExpression(org.codehaus.groovy.ast.expr.DeclarationExpression) ClosureExpression(org.codehaus.groovy.ast.expr.ClosureExpression) AttributeExpression(org.codehaus.groovy.ast.expr.AttributeExpression) GenericsTypeName(org.codehaus.groovy.ast.GenericsType.GenericsTypeName) ClosureUtils.hasImplicitParameter(org.codehaus.groovy.ast.tools.ClosureUtils.hasImplicitParameter) Parameter(org.codehaus.groovy.ast.Parameter) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) Collectors.toMap(java.util.stream.Collectors.toMap) EnumMap(java.util.EnumMap) HashMap(java.util.HashMap)
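
The Collections reference here is the Collections::emptyMap method reference passed to Optional.orElseGet, which supplies a shared empty map only when no enclosing generics parameters are available. A compact sketch of that idiom, with invented names rather than Groovy's internals:

import java.util.Collections;
import java.util.Map;
import java.util.Optional;

// Sketch of Optional.ofNullable(x).orElseGet(Collections::emptyMap): the supplier is only
// invoked when x is null, and Collections.emptyMap() returns a shared immutable instance,
// so neither branch allocates. resolve() and its bindings map are illustrative only.
class GenericsFallback {
    static String resolve(Map<String, String> enclosingOrNull, String name) {
        Map<String, String> bindings =
                Optional.ofNullable(enclosingOrNull).orElseGet(Collections::emptyMap);
        return bindings.getOrDefault(name, name);
    }
}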

Example 38 with Collections

use of java.util.Collections in project groovy by apache.

the class StaticCompilationVisitor method addPrivateFieldsAccessors.

/**
 * Adds special accessors and mutators for private fields so that inner classes can get/set them.
 */
private static void addPrivateFieldsAccessors(final ClassNode node) {
    Map<String, MethodNode> privateFieldAccessors = node.getNodeMetaData(PRIVATE_FIELDS_ACCESSORS);
    Map<String, MethodNode> privateFieldMutators = node.getNodeMetaData(PRIVATE_FIELDS_MUTATORS);
    if (privateFieldAccessors != null || privateFieldMutators != null) {
        // already added
        return;
    }
    Set<ASTNode> accessedFields = node.getNodeMetaData(PV_FIELDS_ACCESS);
    Set<ASTNode> mutatedFields = node.getNodeMetaData(PV_FIELDS_MUTATION);
    if (accessedFields == null && mutatedFields == null)
        return;
    // GROOVY-9385: mutation includes access in case of compound assignment or pre/post-increment/decrement
    if (mutatedFields != null) {
        accessedFields = new HashSet<>(Optional.ofNullable(accessedFields).orElseGet(Collections::emptySet));
        accessedFields.addAll(mutatedFields);
    }
    int acc = -1;
    privateFieldAccessors = (accessedFields != null ? new HashMap<>() : null);
    privateFieldMutators = (mutatedFields != null ? new HashMap<>() : null);
    final int modifiers = ACC_PUBLIC | ACC_STATIC | ACC_SYNTHETIC;
    for (FieldNode fieldNode : node.getFields()) {
        boolean generateAccessor = accessedFields != null && accessedFields.contains(fieldNode);
        boolean generateMutator = mutatedFields != null && mutatedFields.contains(fieldNode);
        if (generateAccessor) {
            acc += 1;
            Parameter param = new Parameter(node.getPlainNodeReference(), "$that");
            Expression receiver = fieldNode.isStatic() ? classX(node) : varX(param);
            Statement body = returnS(attrX(receiver, constX(fieldNode.getName())));
            MethodNode accessor = node.addMethod("pfaccess$" + acc, modifiers, fieldNode.getOriginType(), new Parameter[] { param }, ClassNode.EMPTY_ARRAY, body);
            accessor.setNodeMetaData(STATIC_COMPILE_NODE, Boolean.TRUE);
            privateFieldAccessors.put(fieldNode.getName(), accessor);
        }
        if (generateMutator) {
            // increment acc if it hasn't been incremented in the current iteration
            if (!generateAccessor)
                acc += 1;
            Parameter param = new Parameter(node.getPlainNodeReference(), "$that");
            Expression receiver = fieldNode.isStatic() ? classX(node) : varX(param);
            Parameter value = new Parameter(fieldNode.getOriginType(), "$value");
            Statement body = assignS(attrX(receiver, constX(fieldNode.getName())), varX(value));
            MethodNode mutator = node.addMethod("pfaccess$0" + acc, modifiers, fieldNode.getOriginType(), new Parameter[] { param, value }, ClassNode.EMPTY_ARRAY, body);
            mutator.setNodeMetaData(STATIC_COMPILE_NODE, Boolean.TRUE);
            privateFieldMutators.put(fieldNode.getName(), mutator);
        }
    }
    if (privateFieldAccessors != null) {
        node.setNodeMetaData(PRIVATE_FIELDS_ACCESSORS, privateFieldAccessors);
    }
    if (privateFieldMutators != null) {
        node.setNodeMetaData(PRIVATE_FIELDS_MUTATORS, privateFieldMutators);
    }
}
Also used : FieldNode(org.codehaus.groovy.ast.FieldNode) ForStatement(org.codehaus.groovy.ast.stmt.ForStatement) Statement(org.codehaus.groovy.ast.stmt.Statement) ExpressionStatement(org.codehaus.groovy.ast.stmt.ExpressionStatement) EmptyStatement(org.codehaus.groovy.ast.stmt.EmptyStatement) MethodNode(org.codehaus.groovy.ast.MethodNode) SpreadExpression(org.codehaus.groovy.ast.expr.SpreadExpression) ClosureListExpression(org.codehaus.groovy.ast.expr.ClosureListExpression) ArgumentListExpression(org.codehaus.groovy.ast.expr.ArgumentListExpression) BinaryExpression(org.codehaus.groovy.ast.expr.BinaryExpression) PropertyExpression(org.codehaus.groovy.ast.expr.PropertyExpression) MethodCallExpression(org.codehaus.groovy.ast.expr.MethodCallExpression) Expression(org.codehaus.groovy.ast.expr.Expression) ConstructorCallExpression(org.codehaus.groovy.ast.expr.ConstructorCallExpression) ASTNode(org.codehaus.groovy.ast.ASTNode) Parameter(org.codehaus.groovy.ast.Parameter) Collections(java.util.Collections)
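
Collections appears here as Collections::emptySet, the fallback supplier when the accessed-fields metadata is absent; the code copies whichever set exists into a mutable HashSet and merges in the mutated fields. A small standalone sketch of that merge, under assumed names:

import java.util.Collections;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;

// Sketch: treat a null set as empty, copy it into a mutable HashSet, then fold in a
// second (possibly null) set. Mirrors the accessedFields/mutatedFields merge above.
final class SetMerge {
    static Set<String> merge(Set<String> accessed, Set<String> mutated) {
        Set<String> result =
                new HashSet<>(Optional.ofNullable(accessed).orElseGet(Collections::emptySet));
        if (mutated != null) {
            result.addAll(mutated);
        }
        return result;
    }
}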

Example 39 with Collections

use of java.util.Collections in project crate by crate.

the class ClusterBootstrapServiceTests method testMatchesOnNodeAddress.

public void testMatchesOnNodeAddress() {
    final AtomicBoolean bootstrapped = new AtomicBoolean();
    ClusterBootstrapService clusterBootstrapService = new ClusterBootstrapService(Settings.builder().putList(INITIAL_MASTER_NODES_SETTING.getKey(), localNode.getAddress().toString()).build(), transportService, Collections::emptyList, () -> false, vc -> assertTrue(bootstrapped.compareAndSet(false, true)));
    transportService.start();
    clusterBootstrapService.onFoundPeersUpdated();
    deterministicTaskQueue.runAllTasks();
    assertTrue(bootstrapped.get());
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Collections(java.util.Collections)
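
In this test (and in testMatchesOnHostName below), Collections::emptyList is passed where the bootstrap service expects a supplier of discovered peers, so the service sees no peers beyond the configured addresses. A sketch of satisfying a Supplier<List<T>> parameter with that method reference; the Registry type is invented for illustration:

import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;

// Sketch: Collections::emptyList matches Supplier<List<T>> for any T, so it can stand in
// for a "discovered peers" callback that should always report nothing.
class Registry<T> {
    private final Supplier<List<T>> peers;

    Registry(Supplier<List<T>> peers) {
        this.peers = peers;
    }

    int peerCount() {
        return peers.get().size();
    }

    public static void main(String[] args) {
        Registry<String> none = new Registry<>(Collections::emptyList);
        System.out.println(none.peerCount()); // prints 0
    }
}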

Example 40 with Collections

use of java.util.Collections in project crate by crate.

the class ClusterBootstrapServiceTests method testMatchesOnHostName.

public void testMatchesOnHostName() {
    final AtomicBoolean bootstrapped = new AtomicBoolean();
    ClusterBootstrapService clusterBootstrapService = new ClusterBootstrapService(Settings.builder().putList(INITIAL_MASTER_NODES_SETTING.getKey(), localNode.getHostName()).build(), transportService, Collections::emptyList, () -> false, vc -> assertTrue(bootstrapped.compareAndSet(false, true)));
    transportService.start();
    clusterBootstrapService.onFoundPeersUpdated();
    deterministicTaskQueue.runAllTasks();
    assertTrue(bootstrapped.get());
}
Also used : AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Collections(java.util.Collections)

Aggregations

Collections (java.util.Collections)116 List (java.util.List)60 ArrayList (java.util.ArrayList)41 Test (org.junit.Test)39 Map (java.util.Map)38 Collectors (java.util.stream.Collectors)35 Arrays (java.util.Arrays)28 HashMap (java.util.HashMap)27 Set (java.util.Set)25 HashSet (java.util.HashSet)23 IOException (java.io.IOException)19 Collection (java.util.Collection)19 Optional (java.util.Optional)19 TimeUnit (java.util.concurrent.TimeUnit)16 URI (java.net.URI)13 Assert (org.junit.Assert)13 Function (java.util.function.Function)12 Stream (java.util.stream.Stream)12 Before (org.junit.Before)12 Logger (org.slf4j.Logger)12