
Example 6 with EMPTY

Use of org.apache.commons.lang3.StringUtils.EMPTY in project asterixdb by apache.

From the class LangExpressionToPlanTranslator, the method translateLoad:

@Override
public ILogicalPlan translateLoad(ICompiledDmlStatement stmt) throws AlgebricksException {
    CompiledLoadFromFileStatement clffs = (CompiledLoadFromFileStatement) stmt;
    Dataset dataset = metadataProvider.findDataset(clffs.getDataverseName(), clffs.getDatasetName());
    if (dataset == null) {
        // This would never happen since we check for this in AqlTranslator
        throw new AlgebricksException("Unable to load dataset " + clffs.getDatasetName() + " since it does not exist");
    }
    IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    IAType metaItemType = metadataProvider.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(), stmt.getDatasetName());
    List<List<String>> partitionKeys = targetDatasource.getDataset().getPrimaryKeys();
    if (dataset.hasMetaPart()) {
        throw new AlgebricksException(dataset.getDatasetName() + ": load dataset is not supported on Datasets with Meta records");
    }
    LoadableDataSource lds;
    try {
        lds = new LoadableDataSource(dataset, itemType, metaItemType, clffs.getAdapter(), clffs.getProperties());
    } catch (IOException e) {
        throw new AlgebricksException(e);
    }
    // etsOp is a dummy input operator used to keep the compiler happy. It
    // could be removed, but that would require fixing many rewrite rules
    // that assume a datasource-scan operator always has an input.
    ILogicalOperator etsOp = new EmptyTupleSourceOperator();
    // Add a logical variable for the record.
    List<LogicalVariable> payloadVars = new ArrayList<>();
    payloadVars.add(context.newVar());
    // Create a scan operator and make the empty tuple source its input
    DataSourceScanOperator dssOp = new DataSourceScanOperator(payloadVars, lds);
    dssOp.getInputs().add(new MutableObject<>(etsOp));
    ILogicalExpression payloadExpr = new VariableReferenceExpression(payloadVars.get(0));
    Mutable<ILogicalExpression> payloadRef = new MutableObject<>(payloadExpr);
    // Creating the assign to extract the PK out of the record
    ArrayList<LogicalVariable> pkVars = new ArrayList<>();
    ArrayList<Mutable<ILogicalExpression>> pkExprs = new ArrayList<>();
    List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();
    LogicalVariable payloadVar = payloadVars.get(0);
    for (List<String> keyFieldName : partitionKeys) {
        PlanTranslationUtil.prepareVarAndExpression(keyFieldName, payloadVar, pkVars, pkExprs, varRefsForLoading, context);
    }
    AssignOperator assign = new AssignOperator(pkVars, pkExprs);
    assign.getInputs().add(new MutableObject<>(dssOp));
    // If the input is pre-sorted, we set the ordering property explicitly in the assign
    if (clffs.alreadySorted()) {
        List<OrderColumn> orderColumns = new ArrayList<>();
        for (int i = 0; i < pkVars.size(); ++i) {
            orderColumns.add(new OrderColumn(pkVars.get(i), OrderKind.ASC));
        }
        assign.setExplicitOrderingProperty(new LocalOrderProperty(orderColumns));
    }
    List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset());
    List<LogicalVariable> additionalFilteringVars;
    List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
    List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
    AssignOperator additionalFilteringAssign = null;
    if (additionalFilteringField != null) {
        additionalFilteringVars = new ArrayList<>();
        additionalFilteringAssignExpressions = new ArrayList<>();
        additionalFilteringExpressions = new ArrayList<>();
        PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, payloadVar, additionalFilteringVars, additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
        additionalFilteringAssign = new AssignOperator(additionalFilteringVars, additionalFilteringAssignExpressions);
    }
    InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, payloadRef, varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, true);
    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
    if (additionalFilteringAssign != null) {
        additionalFilteringAssign.getInputs().add(new MutableObject<>(assign));
        insertOp.getInputs().add(new MutableObject<>(additionalFilteringAssign));
    } else {
        insertOp.getInputs().add(new MutableObject<>(assign));
    }
    ILogicalOperator leafOperator = new SinkOperator();
    leafOperator.getInputs().add(new MutableObject<>(insertOp));
    return new ALogicalPlanImpl(new MutableObject<>(leafOperator));
}
Also used: CompiledLoadFromFileStatement (org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement), ArrayList (java.util.ArrayList), DatasetDataSource (org.apache.asterix.metadata.declared.DatasetDataSource), AString (org.apache.asterix.om.base.AString), DataSourceScanOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator), ALogicalPlanImpl (org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl), List (java.util.List), LoadableDataSource (org.apache.asterix.metadata.declared.LoadableDataSource), EmptyTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator), MutableObject (org.apache.commons.lang3.mutable.MutableObject), LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), SinkOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator), Dataset (org.apache.asterix.metadata.entities.Dataset), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), OrderColumn (org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), IOException (java.io.IOException), AssignOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator), Mutable (org.apache.commons.lang3.mutable.Mutable), ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), InsertDeleteUpsertOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator), VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression), LocalOrderProperty (org.apache.hyracks.algebricks.core.algebra.properties.LocalOrderProperty), IAType (org.apache.asterix.om.types.IAType)
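
Note: every operator input in the plan above is wrapped in a commons-lang3 MutableObject, so later rewrite rules can swap an operator's input in place without rebuilding the plan. A minimal, self-contained sketch of that holder API (the string values are illustrative placeholders, not Algebricks objects):

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

public class MutableObjectDemo {
    public static void main(String[] args) {
        // MutableObject is a simple settable reference holder.
        Mutable<String> ref = new MutableObject<>("EmptyTupleSource");
        System.out.println(ref.getValue()); // prints: EmptyTupleSource
        ref.setValue("DataSourceScan");     // swap the referenced value in place
        System.out.println(ref.getValue()); // prints: DataSourceScan
    }
}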

Example 7 with EMPTY

Use of org.apache.commons.lang3.StringUtils.EMPTY in project asterixdb by apache.

From the class InlineAllNtsInSubplanVisitor, the method visitAggregateOperator:

/**
     * Wraps an AggregateOperator or RunningAggregateOperator with a group-by
     * operator where the group-by keys are variables in keyVarsToEnforce. Note
     * that this restriction prevents the visitor from being used to rewrite
     * arbitrary query plans; it can only be used to rewrite a nested plan
     * within a subplan operator.
     *
     * @param op
     *            the logical operator for aggregate or running aggregate.
     * @return the wrapped group-by operator if {@code keyVarsToEnforce} is not
     *         empty, and {@code op} otherwise.
     * @throws AlgebricksException
     */
private ILogicalOperator visitAggregateOperator(ILogicalOperator op) throws AlgebricksException {
    visitSingleInputOperator(op);
    if (correlatedKeyVars.isEmpty()) {
        return op;
    }
    GroupByOperator gbyOp = new GroupByOperator();
    // Creates a copy of correlatedKeyVars, to fix the ConcurrentModificationException in ASTERIXDB-1581.
    List<LogicalVariable> copyOfCorrelatedKeyVars = new ArrayList<>(correlatedKeyVars);
    for (LogicalVariable keyVar : copyOfCorrelatedKeyVars) {
        // This restricts the visitor to nested logical plans inside a
        // Subplan operator, where keyVarsToEnforce forms a candidate key
        // that uniquely identifies a tuple out of the nested-tuple-source.
        LogicalVariable newVar = context.newVar();
        gbyOp.getGroupByList().add(new Pair<>(newVar, new MutableObject<>(new VariableReferenceExpression(keyVar))));
        updateInputToOutputVarMapping(keyVar, newVar, false);
    }
    ILogicalOperator inputOp = op.getInputs().get(0).getValue();
    gbyOp.getInputs().add(new MutableObject<>(inputOp));
    NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(gbyOp));
    op.getInputs().clear();
    op.getInputs().add(new MutableObject<>(nts));
    ILogicalPlan nestedPlan = new ALogicalPlanImpl();
    nestedPlan.getRoots().add(new MutableObject<>(op));
    gbyOp.getNestedPlans().add(nestedPlan);
    OperatorManipulationUtil.computeTypeEnvironmentBottomUp(gbyOp, context);
    return op;
}
Also used: LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), NestedTupleSourceOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator), GroupByOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), ArrayList (java.util.ArrayList), ALogicalPlanImpl (org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl), VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression), ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan), MutableObject (org.apache.commons.lang3.mutable.MutableObject)
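
Note: the copy taken before the loop exists to avoid the ConcurrentModificationException noted in ASTERIXDB-1581; presumably updateInputToOutputVarMapping can modify correlatedKeyVars mid-iteration. A minimal sketch of the copy-before-iterate pattern (variable names are illustrative):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class CopyBeforeIterateDemo {
    public static void main(String[] args) {
        Set<String> keyVars = new HashSet<>(List.of("$x", "$y"));
        // Iterate over a snapshot so the loop body may mutate the original
        // set without throwing a ConcurrentModificationException.
        List<String> copy = new ArrayList<>(keyVars);
        for (String v : copy) {
            keyVars.add(v + "_new"); // this mutation would break a direct iteration
        }
        System.out.println(keyVars);
    }
}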

Example 8 with EMPTY

Use of org.apache.commons.lang3.StringUtils.EMPTY in project asterixdb by apache.

From the class NestedSubplanToJoinRule, the method rewritePost:

@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    if (context.checkIfInDontApplySet(this, opRef.getValue())) {
        return false;
    }
    context.addToDontApplySet(this, opRef.getValue());
    ILogicalOperator op1 = opRef.getValue();
    if (op1.getInputs().size() == 0) {
        return false;
    }
    boolean rewritten = false;
    for (int index = 0; index < op1.getInputs().size(); index++) {
        AbstractLogicalOperator child = (AbstractLogicalOperator) op1.getInputs().get(index).getValue();
        if (child.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
            continue;
        }
        AbstractOperatorWithNestedPlans subplan = (AbstractOperatorWithNestedPlans) child;
        Set<LogicalVariable> freeVars = new HashSet<LogicalVariable>();
        OperatorPropertiesUtil.getFreeVariablesInSubplans(subplan, freeVars);
        if (!freeVars.isEmpty()) {
            /**
             * The subplan is correlated with the outer plan; other rules can deal with it.
             */
            continue;
        }
        /** get the input operator of the subplan operator */
        ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
        AbstractLogicalOperator subplanInputOp = (AbstractLogicalOperator) subplanInput;
        /** If the other join branch is a trivial plan, do not do the rewriting. */
        if (subplanInputOp.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE) {
            continue;
        }
        /** get all nested top operators */
        List<ILogicalPlan> nestedPlans = subplan.getNestedPlans();
        List<Mutable<ILogicalOperator>> nestedRoots = new ArrayList<Mutable<ILogicalOperator>>();
        for (ILogicalPlan nestedPlan : nestedPlans) {
            nestedRoots.addAll(nestedPlan.getRoots());
        }
        if (nestedRoots.size() == 0) {
            /** there are no nested top operators */
            continue;
        }
        /**
         * Expands the input and roots into a DAG of nested-loop joins.
         * Though the joins should be left-outer joins, a left-outer join with condition TRUE is equivalent to an inner join.
         */
        Mutable<ILogicalExpression> expr = new MutableObject<ILogicalExpression>(ConstantExpression.TRUE);
        Mutable<ILogicalOperator> nestedRootRef = nestedRoots.get(0);
        ILogicalOperator join = new InnerJoinOperator(expr, new MutableObject<ILogicalOperator>(subplanInput), nestedRootRef);
        /** rewrite the nested tuple source to be empty tuple source */
        rewriteNestedTupleSource(nestedRootRef, context);
        for (int i = 1; i < nestedRoots.size(); i++) {
            join = new InnerJoinOperator(expr, new MutableObject<ILogicalOperator>(join), nestedRoots.get(i));
        }
        op1.getInputs().get(index).setValue(join);
        context.computeAndSetTypeEnvironmentForOperator(join);
        rewritten = true;
    }
    return rewritten;
}
Also used: LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), ArrayList (java.util.ArrayList), InnerJoinOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator), Mutable (org.apache.commons.lang3.mutable.Mutable), ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan), AbstractOperatorWithNestedPlans (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans), HashSet (java.util.HashSet), MutableObject (org.apache.commons.lang3.mutable.MutableObject)
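
The rewrite leans on the equivalence stated in the comment: with a join condition of TRUE, every left tuple matches every right tuple, so the left-outer join's null-padding branch is never taken and the result equals the inner join (assuming the right input produces at least one tuple). A toy nested-loop sketch of that reasoning, using plain collections rather than Algebricks operators:

import java.util.List;

public class TrueConditionJoinDemo {
    public static void main(String[] args) {
        List<Integer> left = List.of(1, 2);
        List<Integer> right = List.of(10, 20);
        // Condition TRUE: every left tuple finds a match, so the outer
        // join never pads with nulls and degenerates to the inner join,
        // i.e. the full cross product of the two inputs.
        for (int l : left) {
            for (int r : right) {
                System.out.println(l + " x " + r);
            }
        }
    }
}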

Example 9 with EMPTY

Use of org.apache.commons.lang3.StringUtils.EMPTY in project nifi by apache.

From the class HostHeaderHandler, the method extractIPsFromNetworkInterfaces:

/**
 * Extracts the list of IP addresses from custom bound network interfaces. If both HTTPS and HTTP interfaces are
 * defined and HTTPS is enabled, only HTTPS interfaces will be returned. If none are defined, an empty list will be
 * returned.
 *
 * @param niFiProperties the NiFiProperties object
 * @return the list of IP addresses
 */
static List<String> extractIPsFromNetworkInterfaces(NiFiProperties niFiProperties) {
    Map<String, String> networkInterfaces = niFiProperties.isHTTPSConfigured() ? niFiProperties.getHttpsNetworkInterfaces() : niFiProperties.getHttpNetworkInterfaces();
    if (isNotDefined(networkInterfaces)) {
        // No custom interfaces defined
        return new ArrayList<>(0);
    } else {
        List<String> allIPAddresses = new ArrayList<>();
        for (Map.Entry<String, String> entry : networkInterfaces.entrySet()) {
            final String networkInterfaceName = entry.getValue();
            try {
                NetworkInterface ni = NetworkInterface.getByName(networkInterfaceName);
                List<String> ipAddresses = Collections.list(ni.getInetAddresses()).stream().map(inetAddress -> inetAddress.getHostAddress().toLowerCase()).collect(Collectors.toList());
                logger.debug("Resolved the following IP addresses for network interface {}: {}", new Object[] { networkInterfaceName, StringUtils.join(ipAddresses, ", ") });
                allIPAddresses.addAll(ipAddresses);
            } catch (SocketException e) {
                logger.warn("Cannot resolve network interface named {}", networkInterfaceName, e);
            }
        }
        // Dedupe while maintaining order
        return uniqueList(allIPAddresses);
    }
}
Also used: Request (org.eclipse.jetty.server.Request), PrintWriter (java.io.PrintWriter), Logger (org.slf4j.Logger), ServletException (javax.servlet.ServletException), LoggerFactory (org.slf4j.LoggerFactory), HttpServletResponse (javax.servlet.http.HttpServletResponse), NetworkInterface (java.net.NetworkInterface), IOException (java.io.IOException), StringUtils (org.apache.commons.lang3.StringUtils), Collectors (java.util.stream.Collectors), ArrayList (java.util.ArrayList), InetAddress (java.net.InetAddress), Objects (java.util.Objects), Strings (com.google.common.base.Strings), HttpServletRequest (javax.servlet.http.HttpServletRequest), SocketException (java.net.SocketException), List (java.util.List), NiFiProperties (org.apache.nifi.util.NiFiProperties), ScopedHandler (org.eclipse.jetty.server.handler.ScopedHandler), Map (java.util.Map), InetAddressUtils (org.apache.http.conn.util.InetAddressUtils), StringEscapeUtils (org.apache.commons.lang3.StringEscapeUtils), Collections (java.util.Collections), LinkedHashSet (java.util.LinkedHashSet)
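
For reference, the interface resolution above uses only the standard java.net API. A minimal, self-contained sketch of that lookup; "lo" is an assumed interface name, substitute one that exists on your machine (for example "eth0" or "en0"):

import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Collections;

public class InterfaceAddressDemo {
    public static void main(String[] args) throws SocketException {
        NetworkInterface ni = NetworkInterface.getByName("lo"); // assumed interface name
        if (ni == null) {
            System.out.println("No such interface");
            return;
        }
        // Same core calls as the method above: enumerate the interface's
        // addresses and lower-case each host address.
        for (InetAddress address : Collections.list(ni.getInetAddresses())) {
            System.out.println(address.getHostAddress().toLowerCase());
        }
    }
}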

Example 10 with EMPTY

Use of org.apache.commons.lang3.StringUtils.EMPTY in project nifi by apache.

From the class Notify, the method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final PropertyValue signalIdProperty = context.getProperty(RELEASE_SIGNAL_IDENTIFIER);
    final PropertyValue counterNameProperty = context.getProperty(SIGNAL_COUNTER_NAME);
    final PropertyValue deltaProperty = context.getProperty(SIGNAL_COUNTER_DELTA);
    final String attributeCacheRegex = context.getProperty(ATTRIBUTE_CACHE_REGEX).getValue();
    final Integer bufferCount = context.getProperty(SIGNAL_BUFFER_COUNT).asInteger();
    // the cache client used to interact with the distributed cache.
    final AtomicDistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(AtomicDistributedMapCacheClient.class);
    final WaitNotifyProtocol protocol = new WaitNotifyProtocol(cache);
    final Map<String, SignalBuffer> signalBuffers = new HashMap<>();
    for (int i = 0; i < bufferCount; i++) {
        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            break;
        }
        // Signal id is computed from attribute 'RELEASE_SIGNAL_IDENTIFIER' with expression language support
        final String signalId = signalIdProperty.evaluateAttributeExpressions(flowFile).getValue();
        // If the computed value is null or empty, transfer the FlowFile to the failure relationship
        if (StringUtils.isBlank(signalId)) {
            logger.error("FlowFile {} has no attribute for given Release Signal Identifier", new Object[] { flowFile });
            // set 'notified' attribute
            session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
            continue;
        }
        String counterName = counterNameProperty.evaluateAttributeExpressions(flowFile).getValue();
        if (StringUtils.isEmpty(counterName)) {
            counterName = WaitNotifyProtocol.DEFAULT_COUNT_NAME;
        }
        int delta = 1;
        if (deltaProperty.isSet()) {
            final String deltaStr = deltaProperty.evaluateAttributeExpressions(flowFile).getValue();
            try {
                delta = Integer.parseInt(deltaStr);
            } catch (final NumberFormatException e) {
                logger.error("Failed to calculate delta for FlowFile {} due to {}", new Object[] { flowFile, e }, e);
                session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
                continue;
            }
        }
        if (!signalBuffers.containsKey(signalId)) {
            signalBuffers.put(signalId, new SignalBuffer());
        }
        final SignalBuffer signalBuffer = signalBuffers.get(signalId);
        if (StringUtils.isNotEmpty(attributeCacheRegex)) {
            flowFile.getAttributes().entrySet().stream().filter(e -> (!e.getKey().equals("uuid") && e.getKey().matches(attributeCacheRegex))).forEach(e -> signalBuffer.attributesToCache.put(e.getKey(), e.getValue()));
        }
        signalBuffer.incrementDelta(counterName, delta);
        signalBuffer.flowFiles.add(flowFile);
        if (logger.isDebugEnabled()) {
            logger.debug("Cached release signal identifier {} counterName {} from FlowFile {}", new Object[] { signalId, counterName, flowFile });
        }
    }
    signalBuffers.forEach((signalId, signalBuffer) -> {
        // If the cache is unreachable, throw a RuntimeException so the session is rolled back and the processor can retry after yielding for a while.
        try {
            protocol.notify(signalId, signalBuffer.deltas, signalBuffer.attributesToCache);
            signalBuffer.flowFiles.forEach(flowFile -> session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(true)), REL_SUCCESS));
        } catch (IOException e) {
            throw new RuntimeException(String.format("Unable to communicate with cache when processing %s due to %s", signalId, e), e);
        }
    });
}
Also used: StandardValidators (org.apache.nifi.processor.util.StandardValidators), CapabilityDescription (org.apache.nifi.annotation.documentation.CapabilityDescription), ResultType (org.apache.nifi.expression.AttributeExpression.ResultType), HashMap (java.util.HashMap), EventDriven (org.apache.nifi.annotation.behavior.EventDriven), ComponentLog (org.apache.nifi.logging.ComponentLog), StringUtils (org.apache.commons.lang3.StringUtils), PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor), ProcessException (org.apache.nifi.processor.exception.ProcessException), ArrayList (java.util.ArrayList), PropertyValue (org.apache.nifi.components.PropertyValue), HashSet (java.util.HashSet), Relationship (org.apache.nifi.processor.Relationship), Map (java.util.Map), Requirement (org.apache.nifi.annotation.behavior.InputRequirement.Requirement), AtomicDistributedMapCacheClient (org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient), FlowFile (org.apache.nifi.flowfile.FlowFile), ProcessContext (org.apache.nifi.processor.ProcessContext), Set (java.util.Set), IOException (java.io.IOException), ProcessSession (org.apache.nifi.processor.ProcessSession), WritesAttribute (org.apache.nifi.annotation.behavior.WritesAttribute), SeeAlso (org.apache.nifi.annotation.documentation.SeeAlso), List (java.util.List), InputRequirement (org.apache.nifi.annotation.behavior.InputRequirement), SupportsBatching (org.apache.nifi.annotation.behavior.SupportsBatching), AbstractProcessor (org.apache.nifi.processor.AbstractProcessor), Tags (org.apache.nifi.annotation.documentation.Tags), Collections (java.util.Collections)
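
The guards above rely on the distinction between blank and empty in commons-lang3: isBlank also rejects whitespace-only values (appropriate for the signal id), while isEmpty only rejects null and the zero-length string (enough for the counter-name default). A short demonstration, including the StringUtils.EMPTY constant this page indexes:

import org.apache.commons.lang3.StringUtils;

public class BlankVsEmptyDemo {
    public static void main(String[] args) {
        System.out.println(StringUtils.isBlank("   "));              // true: whitespace-only is blank
        System.out.println(StringUtils.isEmpty("   "));              // false: not zero-length
        System.out.println(StringUtils.isEmpty(StringUtils.EMPTY));  // true: EMPTY is ""
        System.out.println(StringUtils.isBlank(null));               // true: null counts as blank
        System.out.println(StringUtils.isNotEmpty("regex"));         // true
    }
}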

Aggregations

List (java.util.List): 44
Map (java.util.Map): 42
ArrayList (java.util.ArrayList): 41
StringUtils (org.apache.commons.lang3.StringUtils): 38
Collectors (java.util.stream.Collectors): 37
HashMap (java.util.HashMap): 33
IOException (java.io.IOException): 27
Set (java.util.Set): 25
HashSet (java.util.HashSet): 22
LoggerFactory (org.slf4j.LoggerFactory): 22
Pair (org.apache.commons.lang3.tuple.Pair): 20
Logger (org.slf4j.Logger): 20
Optional (java.util.Optional): 19
Collections (java.util.Collections): 17
ImmutablePair (org.apache.commons.lang3.tuple.ImmutablePair): 17
java.util (java.util): 15
Arrays.asList (java.util.Arrays.asList): 14
Collection (java.util.Collection): 14
Stream (java.util.stream.Stream): 14
Arrays (java.util.Arrays): 12