Example 66 with DMLRuntimeException

Use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

From class Tsmm2SPInstruction, method parseInstruction.

public static Tsmm2SPInstruction parseInstruction(String str) {
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
    String opcode = parts[0];
    // check supported opcode
    if (!opcode.equalsIgnoreCase("tsmm2"))
        throw new DMLRuntimeException("Tsmm2SPInstruction.parseInstruction():: Unknown opcode " + opcode);
    CPOperand in1 = new CPOperand(parts[1]);
    CPOperand out = new CPOperand(parts[2]);
    MMTSJType type = MMTSJType.valueOf(parts[3]);
    return new Tsmm2SPInstruction(null, in1, out, type, opcode, str);
}
Also used: CPOperand (org.apache.sysml.runtime.instructions.cp.CPOperand), MMTSJType (org.apache.sysml.lops.MMTSJ.MMTSJType), DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException)
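
For context, a minimal sketch of feeding such a parser, assuming the incubator-systemml jar is on the classpath. The instruction string and the class name below are hand-written for illustration (using the '°' field and '·' operand delimiters visible in the write-instruction comment of Example 69), not taken from a real runtime plan.

import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.instructions.spark.Tsmm2SPInstruction;

public class Tsmm2ParseSketch {
    public static void main(String[] args) {
        // hand-written instruction string: exec type, opcode, input, output, MMTSJ type
        String instr = "SPARK°tsmm2°_mVar1·MATRIX·DOUBLE°_mVar2·MATRIX·DOUBLE°LEFT";
        Tsmm2SPInstruction parsed = Tsmm2SPInstruction.parseInstruction(instr);
        System.out.println("parsed: " + parsed.getClass().getSimpleName());

        // any other opcode is rejected with the DMLRuntimeException shown above
        try {
            Tsmm2SPInstruction.parseInstruction(instr.replace("tsmm2", "tsmmX"));
        } catch (DMLRuntimeException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}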

Example 67 with DMLRuntimeException

Use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

From class TsmmSPInstruction, method parseInstruction.

public static TsmmSPInstruction parseInstruction(String str) {
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
    String opcode = parts[0];
    // check supported opcode
    if (!opcode.equalsIgnoreCase("tsmm"))
        throw new DMLRuntimeException("TsmmSPInstruction.parseInstruction():: Unknown opcode " + opcode);
    CPOperand in1 = new CPOperand(parts[1]);
    CPOperand out = new CPOperand(parts[2]);
    MMTSJType type = MMTSJType.valueOf(parts[3]);
    return new TsmmSPInstruction(null, in1, out, type, opcode, str);
}
Also used: CPOperand (org.apache.sysml.runtime.instructions.cp.CPOperand), MMTSJType (org.apache.sysml.lops.MMTSJ.MMTSJType), DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException)
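
The only difference from Example 66 is the opcode; both parsers read an MMTSJType that records on which side of the product the transpose sits: LEFT denotes t(X) %*% X and RIGHT denotes X %*% t(X). The plain-Java sketch below illustrates just that semantics, with no SystemML API involved; the matrix contents are made up.

public class TsmmSemanticsSketch {
    // LEFT:  t(X) %*% X -> n x n result (n = #columns)
    // RIGHT: X %*% t(X) -> m x m result (m = #rows)
    static double[][] transposeSelfMultiply(double[][] X, boolean left) {
        int m = X.length, n = X[0].length;
        int d = left ? n : m; // result dimension
        int k = left ? m : n; // summation length
        double[][] R = new double[d][d];
        for (int i = 0; i < d; i++)
            for (int j = 0; j < d; j++) {
                double sum = 0;
                for (int p = 0; p < k; p++)
                    sum += left ? X[p][i] * X[p][j] : X[i][p] * X[j][p];
                R[i][j] = sum;
            }
        return R;
    }

    public static void main(String[] args) {
        double[][] X = { { 1, 2 }, { 3, 4 }, { 5, 6 } }; // made-up 3x2 matrix
        System.out.println(java.util.Arrays.deepToString(transposeSelfMultiply(X, true)));  // 2x2
        System.out.println(java.util.Arrays.deepToString(transposeSelfMultiply(X, false))); // 3x3
    }
}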

Example 68 with DMLRuntimeException

Use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

From class UaggOuterChainSPInstruction, method parseInstruction.

public static UaggOuterChainSPInstruction parseInstruction(String str) {
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
    String opcode = parts[0];
    if (opcode.equalsIgnoreCase(UAggOuterChain.OPCODE)) {
        AggregateUnaryOperator uaggop = InstructionUtils.parseBasicAggregateUnaryOperator(parts[1]);
        BinaryOperator bop = InstructionUtils.parseBinaryOperator(parts[2]);
        CPOperand in1 = new CPOperand(parts[3]);
        CPOperand in2 = new CPOperand(parts[4]);
        CPOperand out = new CPOperand(parts[5]);
        // derive aggregation operator from unary operator
        String aopcode = InstructionUtils.deriveAggregateOperatorOpcode(parts[1]);
        CorrectionLocationType corrLoc = InstructionUtils.deriveAggregateOperatorCorrectionLocation(parts[1]);
        String corrExists = (corrLoc != CorrectionLocationType.NONE) ? "true" : "false";
        AggregateOperator aop = InstructionUtils.parseAggregateOperator(aopcode, corrExists, corrLoc.toString());
        return new UaggOuterChainSPInstruction(bop, uaggop, aop, in1, in2, out, opcode, str);
    } else {
        throw new DMLRuntimeException("UaggOuterChainSPInstruction.parseInstruction():: Unknown opcode " + opcode);
    }
}
Also used: AggregateUnaryOperator (org.apache.sysml.runtime.matrix.operators.AggregateUnaryOperator), AggregateOperator (org.apache.sysml.runtime.matrix.operators.AggregateOperator), CPOperand (org.apache.sysml.runtime.instructions.cp.CPOperand), BinaryOperator (org.apache.sysml.runtime.matrix.operators.BinaryOperator), CorrectionLocationType (org.apache.sysml.lops.PartialAggregate.CorrectionLocationType), DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException)
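
The middle of this method derives the aggregation operator from the unary aggregate opcode. A minimal sketch of that derivation in isolation, assuming the incubator-systemml jar on the classpath; the opcode "uark+" (row sums) and the class name are arbitrary choices for illustration.

import org.apache.sysml.lops.PartialAggregate.CorrectionLocationType;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.matrix.operators.AggregateOperator;
import org.apache.sysml.runtime.matrix.operators.AggregateUnaryOperator;

public class UaggDerivationSketch {
    public static void main(String[] args) {
        String uopcode = "uark+"; // assumed unary aggregate opcode (row sums)
        AggregateUnaryOperator uaggop = InstructionUtils.parseBasicAggregateUnaryOperator(uopcode);
        // derive the corresponding aggregation opcode and correction location,
        // exactly as parseInstruction does above
        String aopcode = InstructionUtils.deriveAggregateOperatorOpcode(uopcode);
        CorrectionLocationType corrLoc = InstructionUtils.deriveAggregateOperatorCorrectionLocation(uopcode);
        String corrExists = (corrLoc != CorrectionLocationType.NONE) ? "true" : "false";
        AggregateOperator aop = InstructionUtils.parseAggregateOperator(aopcode, corrExists, corrLoc.toString());
        System.out.println(uopcode + " -> " + aopcode + ", correction at " + corrLoc);
    }
}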

Example 69 with DMLRuntimeException

Use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

From class WriteSPInstruction, method parseInstruction.

public static WriteSPInstruction parseInstruction(String str) {
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
    String opcode = parts[0];
    if (!opcode.equals("write")) {
        throw new DMLRuntimeException("Unsupported opcode");
    }
    // Write instructions for csv files also include three additional parameters (hasHeader, delimiter, sparse)
    if (parts.length != 5 && parts.length != 9) {
        throw new DMLRuntimeException("Invalid number of operands in write instruction: " + str);
    }
    // SPARK°write°_mVar2·MATRIX·DOUBLE°./src/test/scripts/functions/data/out/B·SCALAR·STRING·true°matrixmarket·SCALAR·STRING·true
    // _mVar2·MATRIX·DOUBLE
    CPOperand in1 = new CPOperand(parts[1]);
    CPOperand in2 = new CPOperand(parts[2]);
    CPOperand in3 = new CPOperand(parts[3]);
    WriteSPInstruction inst = new WriteSPInstruction(in1, in2, in3, opcode, str);
    if (in3.getName().equalsIgnoreCase("csv")) {
        boolean hasHeader = Boolean.parseBoolean(parts[4]);
        String delim = parts[5];
        boolean sparse = Boolean.parseBoolean(parts[6]);
        FileFormatProperties formatProperties = new CSVFileFormatProperties(hasHeader, delim, sparse);
        inst.setFormatProperties(formatProperties);
        CPOperand in4 = new CPOperand(parts[8]);
        inst.input4 = in4;
    } else {
        FileFormatProperties ffp = new FileFormatProperties();
        CPOperand in4 = new CPOperand(parts[4]);
        inst.input4 = in4;
        inst.setFormatProperties(ffp);
    }
    return inst;
}
Also used: CSVFileFormatProperties (org.apache.sysml.runtime.matrix.data.CSVFileFormatProperties), FileFormatProperties (org.apache.sysml.runtime.matrix.data.FileFormatProperties), CPOperand (org.apache.sysml.runtime.instructions.cp.CPOperand), DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException)
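
The operand tokens shown in the comment inside parseInstruction decompose via CPOperand, which splits a token on the '·' delimiter into name, data type, value type, and an optional literal flag. A small sketch, assuming the incubator-systemml jar on the classpath; the class name is made up and the tokens are copied from that comment.

import org.apache.sysml.runtime.instructions.cp.CPOperand;

public class WriteOperandSketch {
    public static void main(String[] args) {
        // tokens copied from the instruction comment in parseInstruction above
        CPOperand target = new CPOperand("_mVar2·MATRIX·DOUBLE");
        CPOperand fname  = new CPOperand("./src/test/scripts/functions/data/out/B·SCALAR·STRING·true");
        CPOperand format = new CPOperand("matrixmarket·SCALAR·STRING·true");
        System.out.println(target.getName() + ": " + target.getDataType() + "/" + target.getValueType());
        System.out.println(fname.getName() + " literal=" + fname.isLiteral());
        System.out.println(format.getName() + " literal=" + format.isLiteral());
    }
}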

Example 70 with DMLRuntimeException

Use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

From class WriteSPInstruction, method processMatrixWriteInstruction.

protected void processMatrixWriteInstruction(SparkExecutionContext sec, String fname, OutputInfo oi) throws IOException {
    // get input rdd
    JavaPairRDD<MatrixIndexes, MatrixBlock> in1 = sec.getBinaryBlockRDDHandleForVariable(input1.getName());
    MatrixCharacteristics mc = sec.getMatrixCharacteristics(input1.getName());
    if (oi == OutputInfo.MatrixMarketOutputInfo || oi == OutputInfo.TextCellOutputInfo) {
        // piggyback nnz maintenance on write
        LongAccumulator aNnz = null;
        if (!mc.nnzKnown()) {
            aNnz = sec.getSparkContext().sc().longAccumulator("nnz");
            in1 = in1.mapValues(new ComputeBinaryBlockNnzFunction(aNnz));
        }
        JavaRDD<String> header = null;
        if (oi == OutputInfo.MatrixMarketOutputInfo) {
            ArrayList<String> headerContainer = new ArrayList<>(1);
            // MM header line, followed by number of rows, columns, and non-zeros
            String headerStr = "%%MatrixMarket matrix coordinate real general\n"
                + mc.getRows() + " " + mc.getCols() + " " + mc.getNonZeros();
            headerContainer.add(headerStr);
            header = sec.getSparkContext().parallelize(headerContainer);
        }
        JavaRDD<String> ijv = RDDConverterUtils.binaryBlockToTextCell(in1, mc);
        if (header != null)
            customSaveTextFile(header.union(ijv), fname, true);
        else
            customSaveTextFile(ijv, fname, false);
        if (!mc.nnzKnown())
            mc.setNonZeros(aNnz.value());
    } else if (oi == OutputInfo.CSVOutputInfo) {
        if (mc.getRows() == 0 || mc.getCols() == 0) {
            throw new IOException("Write of matrices with zero rows or columns" + " not supported (" + mc.getRows() + "x" + mc.getCols() + ").");
        }
        LongAccumulator aNnz = null;
        // piggyback nnz computation on actual write
        if (!mc.nnzKnown()) {
            aNnz = sec.getSparkContext().sc().longAccumulator("nnz");
            in1 = in1.mapValues(new ComputeBinaryBlockNnzFunction(aNnz));
        }
        JavaRDD<String> out = RDDConverterUtils.binaryBlockToCsv(in1, mc, (CSVFileFormatProperties) formatProperties, true);
        customSaveTextFile(out, fname, false);
        if (!mc.nnzKnown())
            mc.setNonZeros(aNnz.value());
    } else if (oi == OutputInfo.BinaryBlockOutputInfo) {
        // piggyback nnz computation on actual write
        LongAccumulator aNnz = null;
        if (!mc.nnzKnown()) {
            aNnz = sec.getSparkContext().sc().longAccumulator("nnz");
            in1 = in1.mapValues(new ComputeBinaryBlockNnzFunction(aNnz));
        }
        // save binary block rdd on hdfs
        in1.saveAsHadoopFile(fname, MatrixIndexes.class, MatrixBlock.class, SequenceFileOutputFormat.class);
        if (!mc.nnzKnown())
            mc.setNonZeros(aNnz.value());
    } else {
        // unsupported formats: binarycell (not externalized)
        throw new DMLRuntimeException("Unexpected data format: " + OutputInfo.outputInfoToString(oi));
    }
    // write meta data file
    MapReduceTool.writeMetaDataFile(fname + ".mtd", ValueType.DOUBLE, mc, oi, formatProperties);
}
Also used: MatrixBlock (org.apache.sysml.runtime.matrix.data.MatrixBlock), CSVFileFormatProperties (org.apache.sysml.runtime.matrix.data.CSVFileFormatProperties), MatrixIndexes (org.apache.sysml.runtime.matrix.data.MatrixIndexes), ArrayList (java.util.ArrayList), IOException (java.io.IOException), ComputeBinaryBlockNnzFunction (org.apache.sysml.runtime.instructions.spark.functions.ComputeBinaryBlockNnzFunction), MatrixCharacteristics (org.apache.sysml.runtime.matrix.MatrixCharacteristics), JavaRDD (org.apache.spark.api.java.JavaRDD), DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException), LongAccumulator (org.apache.spark.util.LongAccumulator)
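
The recurring pattern in this method, folding the non-zero count into the write itself so no separate pass over the data is needed, can be shown with plain Spark outside SystemML. The RDD contents, output path, and class name below are made up; the point is that the accumulator value is only trustworthy after the write action has forced evaluation.

import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.util.LongAccumulator;

public class PiggybackNnzSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("nnz-sketch").setMaster("local[*]");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            LongAccumulator aNnz = jsc.sc().longAccumulator("nnz");
            JavaRDD<double[]> rows = jsc.parallelize(Arrays.asList(
                new double[] { 0, 1, 2 }, new double[] { 0, 0, 3 }));
            // count non-zeros as a side effect of the same pass that formats the output
            JavaRDD<String> out = rows.map(r -> {
                long nnz = 0;
                StringBuilder sb = new StringBuilder();
                for (double v : r) {
                    if (v != 0) nnz++;
                    sb.append(v).append(' ');
                }
                aNnz.add(nnz);
                return sb.toString().trim();
            });
            out.saveAsTextFile("/tmp/nnz-sketch-out"); // hypothetical path; this action triggers evaluation
            System.out.println("nnz = " + aNnz.value()); // valid only after the action above
        }
    }
}

In the SystemML method above, the same idea is applied to MatrixBlock values via ComputeBinaryBlockNnzFunction, and the count is written back into MatrixCharacteristics once the save completes.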

Aggregations

DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException): 579 usages
MatrixBlock (org.apache.sysml.runtime.matrix.data.MatrixBlock): 104 usages
IOException (java.io.IOException): 102 usages
MatrixCharacteristics (org.apache.sysml.runtime.matrix.MatrixCharacteristics): 85 usages
MatrixObject (org.apache.sysml.runtime.controlprogram.caching.MatrixObject): 78 usages
ArrayList (java.util.ArrayList): 75 usages
CPOperand (org.apache.sysml.runtime.instructions.cp.CPOperand): 49 usages
Path (org.apache.hadoop.fs.Path): 43 usages
MatrixIndexes (org.apache.sysml.runtime.matrix.data.MatrixIndexes): 40 usages
ExecutorService (java.util.concurrent.ExecutorService): 38 usages
Pointer (jcuda.Pointer): 37 usages
Future (java.util.concurrent.Future): 35 usages
CSRPointer (org.apache.sysml.runtime.instructions.gpu.context.CSRPointer): 30 usages
MetaDataFormat (org.apache.sysml.runtime.matrix.MetaDataFormat): 26 usages
FrameBlock (org.apache.sysml.runtime.matrix.data.FrameBlock): 26 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 25 usages
JobConf (org.apache.hadoop.mapred.JobConf): 23 usages
Operator (org.apache.sysml.runtime.matrix.operators.Operator): 22 usages
KahanObject (org.apache.sysml.runtime.instructions.cp.KahanObject): 20 usages
SparkExecutionContext (org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext): 19 usages