
Example 16 with Pair

use of org.apache.commons.lang3.tuple.Pair in project asterixdb by apache.

the class CodeGenUtil method generateScalarFunctionDescriptorBinary.

/**
     * Generates the byte code for a scalar function descriptor.
     *
     * @param packagePrefix
     *            the package prefix of evaluators for code generation.
     * @param originalFuncDescriptorClassName
     *            the original class name of the function descriptor.
     * @param suffixForGeneratedClass
     *            the suffix for the generated class.
     * @param classLoader
     *            the class loader used to load the original function descriptor class.
     * @param action
     *            the customized action applied to the generated class definition bytes.
     * @return the accumulated old-to-new class name mappings.
     * @throws IOException
     * @throws ClassNotFoundException
     */
public static List<Pair<String, String>> generateScalarFunctionDescriptorBinary(String packagePrefix,
        String originalFuncDescriptorClassName, String suffixForGeneratedClass, ClassLoader classLoader,
        ClassByteCodeAction action) throws IOException, ClassNotFoundException {
    originalFuncDescriptorClassName = toInternalClassName(originalFuncDescriptorClassName);
    if (originalFuncDescriptorClassName.equals(DESCRIPTOR_SUPER_CLASS_NAME)) {
        return Collections.emptyList();
    }
    String targetFuncDescriptorClassName = getGeneratedFunctionDescriptorInternalClassName(originalFuncDescriptorClassName, suffixForGeneratedClass);
    // Adds the mapping of the old/new names of the function descriptor.
    List<Pair<String, String>> nameMappings = new ArrayList<>();
    // Generates code for super classes except java.lang.Object.
    Class<?> evaluatorClass = CodeGenUtil.class.getClassLoader().loadClass(toJdkStandardName(originalFuncDescriptorClassName));
    nameMappings.addAll(generateScalarFunctionDescriptorBinary(packagePrefix, evaluatorClass.getSuperclass().getName(), suffixForGeneratedClass, classLoader, action));
    nameMappings.add(Pair.of(originalFuncDescriptorClassName, targetFuncDescriptorClassName));
    nameMappings.add(Pair.of(toJdkStandardName(originalFuncDescriptorClassName), toJdkStandardName(targetFuncDescriptorClassName)));
    // Gathers evaluator factory classes that are created in the function descriptor.
    ClassReader reader = new ClassReader(getResourceStream(originalFuncDescriptorClassName, classLoader));
    GatherEvaluatorFactoryCreationVisitor evalFactoryCreationVisitor = new GatherEvaluatorFactoryCreationVisitor(toInternalClassName(packagePrefix));
    reader.accept(evalFactoryCreationVisitor, 0);
    Set<String> evaluatorFactoryClassNames = evalFactoryCreationVisitor.getCreatedEvaluatorFactoryClassNames();
    // Generates inner classes other than evaluator factories.
    generateNonEvalInnerClasses(reader, evaluatorFactoryClassNames, nameMappings, suffixForGeneratedClass, classLoader, action);
    // Generates evaluator factories that are created in the function descriptor.
    int evalFactoryCounter = 0;
    for (String evaluateFactoryClassName : evaluatorFactoryClassNames) {
        generateEvaluatorFactoryClassBinary(packagePrefix, evaluateFactoryClassName, suffixForGeneratedClass, evalFactoryCounter++, nameMappings, classLoader, action);
    }
    // Transforms the function descriptor class and outputs the generated class binary.
    ClassWriter writer = new ClassWriter(reader, 0);
    RenameClassVisitor renamingVisitor = new RenameClassVisitor(writer, nameMappings);
    reader.accept(renamingVisitor, 0);
    action.runAction(targetFuncDescriptorClassName, writer.toByteArray());
    return nameMappings;
}
Also used : ArrayList(java.util.ArrayList) ClassReader(org.objectweb.asm.ClassReader) ClassWriter(org.objectweb.asm.ClassWriter) Pair(org.apache.commons.lang3.tuple.Pair)
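
The transformation above drives ASM's visitor pipeline by hand: a ClassReader feeds a renaming visitor that writes into a ClassWriter. As a minimal, standalone sketch of the same idea (using ASM's stock ClassRemapper and SimpleRemapper rather than AsterixDB's RenameClassVisitor, and a hypothetical helper class):

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.commons.ClassRemapper;
import org.objectweb.asm.commons.SimpleRemapper;

// Hypothetical helper, not part of AsterixDB: renames one class and all
// internal references to it, via the same visitor pipeline CodeGenUtil drives.
public final class RenameSketch {

    public static byte[] rename(byte[] classBytes, String oldInternalName, String newInternalName) {
        ClassReader reader = new ClassReader(classBytes);
        ClassWriter writer = new ClassWriter(reader, 0);
        // ClassRemapper rewrites every occurrence of the mapped internal name
        // (type references, method descriptors, the class's own name).
        reader.accept(new ClassRemapper(writer, new SimpleRemapper(oldInternalName, newInternalName)), 0);
        return writer.toByteArray();
    }
}

SimpleRemapper handles a single mapping; CodeGenUtil instead threads the whole nameMappings list through its own RenameClassVisitor so that descriptor, inner-class, and evaluator-factory names are all rewritten in one pass.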

Example 17 with Pair

use of org.apache.commons.lang3.tuple.Pair in project incubator-systemml by apache.

the class PlanSelectionFuseCostBased method rGetPlanCosts.

private static double rGetPlanCosts(CPlanMemoTable memo, Hop current, HashSet<Pair<Long, Long>> visited,
        HashSet<Long> partition, ArrayList<Long> M, boolean[] plan, HashMap<Long, Double> computeCosts,
        CostVector costsCurrent, TemplateType currentType) {
    //memoization per hop id and cost vector to account for redundant
    //computation without double counting materialized results or compute
    //costs of complex operation DAGs within a single fused operator
    Pair<Long, Long> tag = Pair.of(current.getHopID(), (costsCurrent == null) ? 0 : costsCurrent.ID);
    if (visited.contains(tag))
        return 0;
    visited.add(tag);
    //open template if necessary, including memoization
    //under awareness of current plan choice
    MemoTableEntry best = null;
    boolean opened = false;
    if (memo.contains(current.getHopID())) {
        if (currentType == null) {
            best = memo.get(current.getHopID()).stream()
                    .filter(p -> isValid(p, current))
                    .filter(p -> hasNoRefToMaterialization(p, M, plan))
                    .min(new BasicPlanComparator()).orElse(null);
            opened = true;
        } else {
            best = memo.get(current.getHopID()).stream()
                    .filter(p -> p.type == currentType || p.type == TemplateType.CellTpl)
                    .filter(p -> hasNoRefToMaterialization(p, M, plan))
                    .min(Comparator.comparing(p -> 7 - ((p.type == currentType) ? 4 : 0) - p.countPlanRefs()))
                    .orElse(null);
        }
    }
    //create new cost vector if opened, initialized with write costs
    CostVector costVect = !opened ? costsCurrent : new CostVector(Math.max(current.getDim1(), 1) * Math.max(current.getDim2(), 1));
    //add compute costs of current operator to the cost vector
    if (partition.contains(current.getHopID()))
        costVect.computeCosts += computeCosts.get(current.getHopID());
    //process children recursively
    double costs = 0;
    for (int i = 0; i < current.getInput().size(); i++) {
        Hop c = current.getInput().get(i);
        if (best != null && best.isPlanRef(i))
            costs += rGetPlanCosts(memo, c, visited, partition, M, plan, computeCosts, costVect, best.type);
        else if (best != null && isImplicitlyFused(current, i, best.type))
            costVect.addInputSize(c.getInput().get(0).getHopID(), Math.max(c.getDim1(), 1) * Math.max(c.getDim2(), 1));
        else {
            //include children and I/O costs
            costs += rGetPlanCosts(memo, c, visited, partition, M, plan, computeCosts, null, null);
            if (costVect != null && c.getDataType().isMatrix())
                costVect.addInputSize(c.getHopID(), Math.max(c.getDim1(), 1) * Math.max(c.getDim2(), 1));
        }
    }
    //add costs for opened fused operator
    if (partition.contains(current.getHopID())) {
        if (opened) {
            if (LOG.isTraceEnabled())
                LOG.trace("Cost vector for fused operator (hop " + current.getHopID() + "): " + costVect);
            //time for output write
            costs += costVect.outSize * 8 / WRITE_BANDWIDTH;
            costs += Math.max(costVect.computeCosts * costVect.getMaxInputSize() / COMPUTE_BANDWIDTH, costVect.getSumInputSizes() * 8 / READ_BANDWIDTH);
        } else if (hasNonPartitionConsumer(current, partition)) {
            //add costs for non-partition read in the middle of a fused operator
            costs += rGetPlanCosts(memo, current, visited, partition, M, plan, computeCosts, null, null);
        }
    }
    //sanity check: costs must be non-negative and finite
    if (costs < 0 || Double.isNaN(costs) || Double.isInfinite(costs))
        throw new RuntimeException("Wrong cost estimate: " + costs);
    return costs;
}
Also used : Arrays(java.util.Arrays) IndexingOp(org.apache.sysml.hops.IndexingOp) HashMap(java.util.HashMap) AggUnaryOp(org.apache.sysml.hops.AggUnaryOp) ParameterizedBuiltinOp(org.apache.sysml.hops.ParameterizedBuiltinOp) AggOp(org.apache.sysml.hops.Hop.AggOp) ArrayList(java.util.ArrayList) LiteralOp(org.apache.sysml.hops.LiteralOp) HashSet(java.util.HashSet) MemoTableEntry(org.apache.sysml.hops.codegen.template.CPlanMemoTable.MemoTableEntry) Pair(org.apache.commons.lang3.tuple.Pair) ReorgOp(org.apache.sysml.hops.ReorgOp) IDSequence(org.apache.sysml.runtime.controlprogram.parfor.util.IDSequence) CollectionUtils(org.apache.commons.collections.CollectionUtils) InfrastructureAnalyzer(org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer) AggBinaryOp(org.apache.sysml.hops.AggBinaryOp) TernaryOp(org.apache.sysml.hops.TernaryOp) Iterator(java.util.Iterator) Collection(java.util.Collection) TemplateType(org.apache.sysml.hops.codegen.template.TemplateBase.TemplateType) BinaryOp(org.apache.sysml.hops.BinaryOp) Collectors(java.util.stream.Collectors) Direction(org.apache.sysml.hops.Hop.Direction) Hop(org.apache.sysml.hops.Hop) List(java.util.List) Entry(java.util.Map.Entry) Log(org.apache.commons.logging.Log) LogFactory(org.apache.commons.logging.LogFactory) Comparator(java.util.Comparator) HopRewriteUtils(org.apache.sysml.hops.rewrite.HopRewriteUtils) UnaryOp(org.apache.sysml.hops.UnaryOp)
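
For the opened-operator branch above, the estimate is output write time plus the larger of compute time and input read time. A minimal restatement of that formula; the bandwidth constants here are illustrative placeholders, not the tuned values SystemML derives via InfrastructureAnalyzer:

// Standalone sketch of the fused-operator cost model in rGetPlanCosts.
public final class FusedOpCostSketch {

    static final double WRITE_BANDWIDTH = 512d * 1024 * 1024;      // bytes/s, placeholder
    static final double READ_BANDWIDTH = 2d * 1024 * 1024 * 1024;  // bytes/s, placeholder
    static final double COMPUTE_BANDWIDTH = 2e9;                   // ops/s, placeholder

    // cost = output write time + max(compute time, input read time),
    // mirroring the opened branch of rGetPlanCosts.
    static double fusedOperatorCost(double outCells, double computeCosts,
            double maxInputCells, double sumInputCells) {
        double write = outCells * 8 / WRITE_BANDWIDTH; // 8 bytes per double cell
        double compute = computeCosts * maxInputCells / COMPUTE_BANDWIDTH;
        double read = sumInputCells * 8 / READ_BANDWIDTH;
        return write + Math.max(compute, read);
    }
}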

Example 18 with Pair

use of org.apache.commons.lang3.tuple.Pair in project asterixdb by apache.

the class FramewriterTest method createWriters.

/**
     * @return an array of writers to test. These writers can be of the same type but behave differently
     *         based on the included mocks.
     * @throws HyracksDataException
     */
public IFrameWriter[] createWriters() throws HyracksDataException {
    ArrayList<BTreeSearchOperatorNodePushable> writers = new ArrayList<>();
    Pair<IIndexDataflowHelperFactory, ISearchOperationCallbackFactory>[] pairs = pairs();
    IRecordDescriptorProvider[] recordDescProviders = mockRecDescProviders();
    int partition = 0;
    IHyracksTaskContext[] ctxs = mockIHyracksTaskContext();
    int[] keys = { 0 };
    boolean lowKeyInclusive = true;
    boolean highKeyInclusive = true;
    for (Pair<IIndexDataflowHelperFactory, ISearchOperationCallbackFactory> pair : pairs) {
        for (IRecordDescriptorProvider recordDescProvider : recordDescProviders) {
            for (IHyracksTaskContext ctx : ctxs) {
                BTreeSearchOperatorNodePushable writer = new BTreeSearchOperatorNodePushable(ctx, partition,
                        recordDescProvider.getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0),
                        keys, keys, lowKeyInclusive, highKeyInclusive, keys, keys, pair.getLeft(), false, false, null,
                        pair.getRight(), false);
                writers.add(writer);
            }
        }
    }
    // Return the frame writers created using the mocks
    return writers.toArray(new IFrameWriter[writers.size()]);
}
Also used : OperatorDescriptorId(org.apache.hyracks.api.dataflow.OperatorDescriptorId) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) ArrayList(java.util.ArrayList) IRecordDescriptorProvider(org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider) ISearchOperationCallbackFactory(org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory) IIndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) BTreeSearchOperatorNodePushable(org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorNodePushable) Pair(org.apache.commons.lang3.tuple.Pair)
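
The nested loops above enumerate the cross product of mocked (IIndexDataflowHelperFactory, ISearchOperationCallbackFactory) pairs, record descriptor providers, and task contexts. The pairing step itself reduces to a generic helper; a sketch with hypothetical type parameters, using only commons-lang3's Pair.of:

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.tuple.Pair;

// Hypothetical generic helper: pairs every left option with every right option,
// the same enumeration createWriters performs over its mocked factories.
public final class CrossProductSketch {

    static <L, R> List<Pair<L, R>> crossProduct(List<L> lefts, List<R> rights) {
        List<Pair<L, R>> out = new ArrayList<>();
        for (L left : lefts) {
            for (R right : rights) {
                out.add(Pair.of(left, right));
            }
        }
        return out;
    }
}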

Example 19 with Pair

use of org.apache.commons.lang3.tuple.Pair in project beam by apache.

the class TranslationContext method populateDAG.

public void populateDAG(DAG dag) {
    for (Map.Entry<String, Operator> nameAndOperator : this.operators.entrySet()) {
        dag.addOperator(nameAndOperator.getKey(), nameAndOperator.getValue());
    }
    int streamIndex = 0;
    for (Map.Entry<PCollection, Pair<OutputPortInfo, List<InputPortInfo>>> streamEntry : this.streams.entrySet()) {
        List<InputPortInfo> destInfo = streamEntry.getValue().getRight();
        InputPort[] sinks = new InputPort[destInfo.size()];
        for (int i = 0; i < sinks.length; i++) {
            sinks[i] = destInfo.get(i).port;
        }
        if (sinks.length > 0) {
            DAG.StreamMeta streamMeta = dag.addStream("stream" + streamIndex++, streamEntry.getValue().getLeft().port, sinks);
            if (pipelineOptions.isParDoFusionEnabled()) {
                optimizeStreams(streamMeta, streamEntry);
            }
            for (InputPort port : sinks) {
                PCollection pc = streamEntry.getKey();
                Coder coder = pc.getCoder();
                if (pc.getWindowingStrategy() != null) {
                    coder = FullWindowedValueCoder.of(pc.getCoder(), pc.getWindowingStrategy().getWindowFn().windowCoder());
                }
                Coder<Object> wrapperCoder = ApexStreamTuple.ApexStreamTupleCoder.of(coder);
                CoderAdapterStreamCodec streamCodec = new CoderAdapterStreamCodec(wrapperCoder);
                dag.setInputPortAttribute(port, PortContext.STREAM_CODEC, streamCodec);
            }
        }
    }
}
Also used : Operator(com.datatorrent.api.Operator) Coder(org.apache.beam.sdk.coders.Coder) FullWindowedValueCoder(org.apache.beam.sdk.util.WindowedValue.FullWindowedValueCoder) InputPort(com.datatorrent.api.Operator.InputPort) DAG(com.datatorrent.api.DAG) CoderAdapterStreamCodec(org.apache.beam.runners.apex.translation.utils.CoderAdapterStreamCodec) PCollection(org.apache.beam.sdk.values.PCollection) HashMap(java.util.HashMap) Map(java.util.Map) Pair(org.apache.commons.lang3.tuple.Pair) ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair)
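
Here each entry of the streams map pairs one upstream output port (left) with the list of downstream input ports (right). A stripped-down sketch of that shape and traversal, with String standing in for the Apex port types and the stream wiring reduced to printing edges:

import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.tuple.Pair;

// Sketch of the TranslationContext.streams map shape; String stands in for the
// runner's port types, and wire() only prints the source-to-sink edges.
public final class StreamMapSketch {

    static void wire(Map<String, Pair<String, List<String>>> streams) {
        for (Map.Entry<String, Pair<String, List<String>>> entry : streams.entrySet()) {
            String source = entry.getValue().getLeft();        // single producer port
            for (String sink : entry.getValue().getRight()) {  // one or more consumer ports
                System.out.println(entry.getKey() + ": " + source + " -> " + sink);
            }
        }
    }
}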

Example 20 with Pair

use of org.apache.commons.lang3.tuple.Pair in project disunity by ata4.

the class BundleInfo method buildHeaderTable.

private Table<Integer, Integer, Object> buildHeaderTable(BundleHeader header) {
    TableBuilder table = new TableBuilder();
    table.row("Field", "Value");
    table.row("signature", header.signature());
    table.row("streamVersion", header.streamVersion());
    table.row("unityVersion", header.unityVersion());
    table.row("unityRevision", header.unityRevision());
    table.row("minimumStreamedBytes", header.minimumStreamedBytes());
    table.row("headerSize", header.headerSize());
    table.row("numberOfLevelsToDownload", header.numberOfLevelsToDownload());
    table.row("numberOfLevels", header.numberOfLevels());
    List<Pair<Long, Long>> levelByteEnds = header.levelByteEnd();
    for (int i = 0; i < levelByteEnds.size(); i++) {
        Pair<Long, Long> levelByteEnd = levelByteEnds.get(i);
        table.row("levelByteEnd[" + i + "][0]", levelByteEnd.getLeft());
        table.row("levelByteEnd[" + i + "][1]", levelByteEnd.getRight());
    }
    if (header.streamVersion() >= 2) {
        table.row("completeFileSize", header.completeFileSize());
    }
    if (header.streamVersion() >= 3) {
        table.row("dataHeaderSize", header.dataHeaderSize());
    }
    return table.get();
}
Also used : TableBuilder(info.ata4.disunity.cli.util.TableBuilder) Pair(org.apache.commons.lang3.tuple.Pair)

Aggregations

Pair (org.apache.commons.lang3.tuple.Pair): 685
ArrayList (java.util.ArrayList): 209
List (java.util.List): 154
Test (org.junit.Test): 150
ImmutablePair (org.apache.commons.lang3.tuple.ImmutablePair): 142
HashMap (java.util.HashMap): 123
Collectors (java.util.stream.Collectors): 123
Map (java.util.Map): 112
Message (com.microsoft.azure.sdk.iot.device.Message): 71
IOException (java.io.IOException): 70
MutablePair (org.apache.commons.lang3.tuple.MutablePair): 64
java.util (java.util): 55
IotHubTransportMessage (com.microsoft.azure.sdk.iot.device.transport.IotHubTransportMessage): 52
Set (java.util.Set): 49
StringUtils (org.apache.commons.lang3.StringUtils): 48
File (java.io.File): 46
Optional (java.util.Optional): 45
Arrays (java.util.Arrays): 44
HashSet (java.util.HashSet): 40
Test (org.junit.jupiter.api.Test): 39