
Example 46 with DMLRuntimeException

use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

the class DMLConfig method setTextValue.

/**
 * Updates the value of the given configuration parameter in the backing XML
 * document, creating the corresponding element if it does not exist yet.
 * @param paramName parameter name
 * @param paramValue new parameter value
 */
public void setTextValue(String paramName, String paramValue) {
    if (_xmlRoot != null) {
        NodeList list = _xmlRoot.getElementsByTagName(paramName);
        if (list != null && list.getLength() > 0) {
            Element elem = (Element) list.item(0);
            elem.getFirstChild().setNodeValue(paramValue);
        } else {
            Node value = _document.createTextNode(paramValue);
            Node element = _document.createElement(paramName);
            element.appendChild(value);
            _xmlRoot.appendChild(element);
        }
    } else {
        try {
            DocumentBuilder builder = getDocumentBuilder();
            String configString = "<root><" + paramName + ">" + paramValue + "</" + paramName + "></root>";
            _document = builder.parse(new ByteArrayInputStream(configString.getBytes("UTF-8")));
            _xmlRoot = _document.getDocumentElement();
        } catch (Exception e) {
            throw new DMLRuntimeException("Unable to set config value", e);
        }
    }
}
Also used : DocumentBuilder(javax.xml.parsers.DocumentBuilder) ByteArrayInputStream(java.io.ByteArrayInputStream) NodeList(org.w3c.dom.NodeList) Element(org.w3c.dom.Element) Node(org.w3c.dom.Node) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) ParserConfigurationException(javax.xml.parsers.ParserConfigurationException) SAXException(org.xml.sax.SAXException) ParseException(org.apache.sysml.parser.ParseException)
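
A minimal usage sketch for setTextValue, assuming the active configuration is obtained via ConfigurationManager.getDMLConfig(), that getTextValue is the matching getter, and using "sysml.localtmpdir" as an illustrative configuration key:

import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.conf.DMLConfig;

public class SetConfigValueSketch {
    public static void main(String[] args) {
        // assumption: ConfigurationManager returns the active (or default) DMLConfig
        DMLConfig config = ConfigurationManager.getDMLConfig();
        // updates the existing <sysml.localtmpdir> element, or creates it if missing
        config.setTextValue("sysml.localtmpdir", "/tmp/systemml");
        System.out.println("sysml.localtmpdir = " + config.getTextValue("sysml.localtmpdir"));
    }
}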

Example 47 with DMLRuntimeException

use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

the class DMLScript method checkSecuritySetup.

private static void checkSecuritySetup(DMLConfig config) throws IOException, DMLRuntimeException {
    // analyze local configuration
    String userName = System.getProperty("user.name");
    HashSet<String> groupNames = new HashSet<>();
    try {
        // check existence, for backwards compatibility to < hadoop 0.21
        if (UserGroupInformation.class.getMethod("getCurrentUser") != null) {
            String[] groups = UserGroupInformation.getCurrentUser().getGroupNames();
            Collections.addAll(groupNames, groups);
        }
    } catch (Exception ex) {
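        // ignore: group lookup is best-effort; older Hadoop versions lack getCurrentUser()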
    }
    // analyze hadoop configuration
    JobConf job = ConfigurationManager.getCachedJobConf();
    boolean localMode = InfrastructureAnalyzer.isLocalMode(job);
    String taskController = job.get(MRConfigurationNames.MR_TASKTRACKER_TASKCONTROLLER, "org.apache.hadoop.mapred.DefaultTaskController");
    String ttGroupName = job.get(MRConfigurationNames.MR_TASKTRACKER_GROUP, "null");
    // note: job.get("dfs.permissions.supergroup",null);
    String perm = job.get(MRConfigurationNames.DFS_PERMISSIONS_ENABLED, "null");
    URI fsURI = FileSystem.getDefaultUri(job);
    // determine security states
    boolean flagDiffUser = !(taskController.equals("org.apache.hadoop.mapred.LinuxTaskController") // runs map/reduce tasks as the current user
        || localMode // run in the same JVM anyway
        || groupNames.contains(ttGroupName)); // user in task tracker group
    boolean flagLocalFS = fsURI == null || fsURI.getScheme().equals("file");
    boolean flagSecurity = perm.equals("yes");
    LOG.debug("SystemML security check: "
        + "local.user.name = " + userName + ", "
        + "local.user.groups = " + ProgramConverter.serializeStringCollection(groupNames) + ", "
        + MRConfigurationNames.MR_JOBTRACKER_ADDRESS + " = " + job.get(MRConfigurationNames.MR_JOBTRACKER_ADDRESS) + ", "
        + MRConfigurationNames.MR_TASKTRACKER_TASKCONTROLLER + " = " + taskController + ", "
        + MRConfigurationNames.MR_TASKTRACKER_GROUP + " = " + ttGroupName + ", "
        + MRConfigurationNames.FS_DEFAULTFS + " = " + ((fsURI != null) ? fsURI.getScheme() : "null") + ", "
        + MRConfigurationNames.DFS_PERMISSIONS_ENABLED + " = " + perm);
    // print warning if permission issues possible
    if (flagDiffUser && (flagLocalFS || flagSecurity)) {
        LOG.warn("Cannot run map/reduce tasks as user '" + userName + "'. Using tasktracker group '" + ttGroupName + "'.");
    }
}
Also used : JobConf(org.apache.hadoop.mapred.JobConf) URI(java.net.URI) DMLScriptException(org.apache.sysml.runtime.DMLScriptException) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException) LanguageException(org.apache.sysml.parser.LanguageException) IOException(java.io.IOException) AlreadySelectedException(org.apache.commons.cli.AlreadySelectedException) ParseException(org.apache.sysml.parser.ParseException) HashSet(java.util.HashSet) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
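
A standalone sketch of the local-user analysis performed above, calling the same Hadoop UserGroupInformation methods directly (without the reflection guard for pre-0.21 clusters). It only illustrates the user/group lookup; DMLScript.checkSecuritySetup itself is private and not called here:

import java.util.Arrays;
import org.apache.hadoop.security.UserGroupInformation;

public class UserGroupSketch {
    public static void main(String[] args) throws Exception {
        // resolve the current Hadoop user and its group memberships
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        System.out.println("user   = " + ugi.getShortUserName());
        System.out.println("groups = " + Arrays.toString(ugi.getGroupNames()));
    }
}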

Example 48 with DMLRuntimeException

use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

the class FrameAppendMSPInstruction method processInstruction.

@Override
public void processInstruction(ExecutionContext ec) {
    // map-only append (rhs must be vector and fit in mapper mem)
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    checkBinaryAppendInputCharacteristics(sec, _cbind, false, false);
    JavaPairRDD<Long, FrameBlock> in1 = sec.getFrameBinaryBlockRDDHandleForVariable(input1.getName());
    PartitionedBroadcast<FrameBlock> in2 = sec.getBroadcastForFrameVariable(input2.getName());
    // execute map-append operations (partitioning preserving if keys for blocks not changing)
    JavaPairRDD<Long, FrameBlock> out = null;
    if (preservesPartitioning(_cbind)) {
        out = in1.mapPartitionsToPair(new MapSideAppendPartitionFunction(in2), true);
    } else
        throw new DMLRuntimeException("Append type rbind not supported for frame mappend, instead use rappend");
    // put output RDD handle into symbol table
    updateBinaryAppendOutputMatrixCharacteristics(sec, _cbind);
    sec.setRDDHandleForVariable(output.getName(), out);
    sec.addLineageRDD(output.getName(), input1.getName());
    sec.addLineageBroadcast(output.getName(), input2.getName());
    // update schema of output with merged input schemas
    sec.getFrameObject(output.getName()).setSchema(sec.getFrameObject(input1.getName()).mergeSchemas(sec.getFrameObject(input2.getName())));
}
Also used : FrameBlock(org.apache.sysml.runtime.matrix.data.FrameBlock) SparkExecutionContext(org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException)
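
Because processInstruction requires a full SparkExecutionContext, a direct call is hard to show in isolation. The sketch below only mirrors the append-type guard above and shows the resulting DMLRuntimeException being caught; validateAppendType is a hypothetical helper, not SystemML API:

import org.apache.sysml.runtime.DMLRuntimeException;

public class FrameAppendGuardSketch {
    // mirrors the check above: map-side frame append supports cbind only
    static void validateAppendType(boolean cbind) {
        if (!cbind)
            throw new DMLRuntimeException("Append type rbind not supported for frame mappend, instead use rappend");
    }

    public static void main(String[] args) {
        validateAppendType(true); // cbind: accepted
        try {
            validateAppendType(false); // rbind: rejected
        } catch (DMLRuntimeException e) {
            System.err.println("Caught: " + e.getMessage());
        }
    }
}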

Example 49 with DMLRuntimeException

use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

the class FrameIndexingSPInstruction method processInstruction.

@Override
public void processInstruction(ExecutionContext ec) {
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    String opcode = getOpcode();
    // get indexing range
    long rl = ec.getScalarInput(rowLower.getName(), rowLower.getValueType(), rowLower.isLiteral()).getLongValue();
    long ru = ec.getScalarInput(rowUpper.getName(), rowUpper.getValueType(), rowUpper.isLiteral()).getLongValue();
    long cl = ec.getScalarInput(colLower.getName(), colLower.getValueType(), colLower.isLiteral()).getLongValue();
    long cu = ec.getScalarInput(colUpper.getName(), colUpper.getValueType(), colUpper.isLiteral()).getLongValue();
    IndexRange ixrange = new IndexRange(rl, ru, cl, cu);
    // right indexing
    if (opcode.equalsIgnoreCase(RightIndex.OPCODE)) {
        // update and check output dimensions
        MatrixCharacteristics mcIn = sec.getMatrixCharacteristics(input1.getName());
        MatrixCharacteristics mcOut = sec.getMatrixCharacteristics(output.getName());
        mcOut.set(ru - rl + 1, cu - cl + 1, mcIn.getRowsPerBlock(), mcIn.getColsPerBlock());
        checkValidOutputDimensions(mcOut);
        // execute right indexing operation (partitioning-preserving if possible)
        JavaPairRDD<Long, FrameBlock> in1 = sec.getFrameBinaryBlockRDDHandleForVariable(input1.getName());
        JavaPairRDD<Long, FrameBlock> out = null;
        if (isPartitioningPreservingRightIndexing(mcIn, ixrange)) {
            out = in1.mapPartitionsToPair(new SliceBlockPartitionFunction(ixrange, mcOut), true);
        } else {
            out = in1.filter(new IsFrameBlockInRange(rl, ru, mcOut)).mapToPair(new SliceBlock(ixrange, mcOut));
        }
        // put output RDD handle into symbol table
        sec.setRDDHandleForVariable(output.getName(), out);
        sec.addLineageRDD(output.getName(), input1.getName());
        // update schema of output with subset of input schema
        sec.getFrameObject(output.getName()).setSchema(sec.getFrameObject(input1.getName()).getSchema((int) cl, (int) cu));
    } else if (opcode.equalsIgnoreCase(LeftIndex.OPCODE) || opcode.equalsIgnoreCase("mapLeftIndex")) { // left indexing
        JavaPairRDD<Long, FrameBlock> in1 = sec.getFrameBinaryBlockRDDHandleForVariable(input1.getName());
        PartitionedBroadcast<FrameBlock> broadcastIn2 = null;
        JavaPairRDD<Long, FrameBlock> in2 = null;
        JavaPairRDD<Long, FrameBlock> out = null;
        // update and check output dimensions
        MatrixCharacteristics mcOut = sec.getMatrixCharacteristics(output.getName());
        MatrixCharacteristics mcLeft = ec.getMatrixCharacteristics(input1.getName());
        mcOut.set(mcLeft.getRows(), mcLeft.getCols(), mcLeft.getRowsPerBlock(), mcLeft.getColsPerBlock());
        checkValidOutputDimensions(mcOut);
        // note: always frame rhs, scalars are preprocessed via cast to 1x1 frame
        MatrixCharacteristics mcRight = ec.getMatrixCharacteristics(input2.getName());
        // sanity check matching index range and rhs dimensions
        if (!mcRight.dimsKnown()) {
            throw new DMLRuntimeException("The right input frame dimensions are not specified for FrameIndexingSPInstruction");
        }
        if (!(ru - rl + 1 == mcRight.getRows() && cu - cl + 1 == mcRight.getCols())) {
            throw new DMLRuntimeException("Invalid index range of leftindexing: [" + rl + ":" + ru + "," + cl + ":" + cu + "] vs [" + mcRight.getRows() + "x" + mcRight.getCols() + "].");
        }
        if (opcode.equalsIgnoreCase("mapLeftIndex")) {
            broadcastIn2 = sec.getBroadcastForFrameVariable(input2.getName());
            // partitioning-preserving mappartitions (key access required for broadcast lookup)
            out = in1.mapPartitionsToPair(new LeftIndexPartitionFunction(broadcastIn2, ixrange, mcOut), true);
        } else {
            // general case
            // zero-out lhs
            in1 = in1.flatMapToPair(new ZeroOutLHS(false, ixrange, mcLeft));
            // slice rhs, shift and merge with lhs
            in2 = sec.getFrameBinaryBlockRDDHandleForVariable(input2.getName()).flatMapToPair(new SliceRHSForLeftIndexing(ixrange, mcLeft));
            out = FrameRDDAggregateUtils.mergeByKey(in1.union(in2));
        }
        sec.setRDDHandleForVariable(output.getName(), out);
        sec.addLineageRDD(output.getName(), input1.getName());
        if (broadcastIn2 != null)
            sec.addLineageBroadcast(output.getName(), input2.getName());
        if (in2 != null)
            sec.addLineageRDD(output.getName(), input2.getName());
    } else
        throw new DMLRuntimeException("Invalid opcode (" + opcode + ") encountered in FrameIndexingSPInstruction.");
}
Also used : IsFrameBlockInRange(org.apache.sysml.runtime.instructions.spark.functions.IsFrameBlockInRange) MatrixCharacteristics(org.apache.sysml.runtime.matrix.MatrixCharacteristics) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException) IndexRange(org.apache.sysml.runtime.util.IndexRange) PartitionedBroadcast(org.apache.sysml.runtime.instructions.spark.data.PartitionedBroadcast) FrameBlock(org.apache.sysml.runtime.matrix.data.FrameBlock) JavaPairRDD(org.apache.spark.api.java.JavaPairRDD) SparkExecutionContext(org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext)
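
A standalone sketch of the left-indexing sanity check above: the selected range [rl:ru, cl:cu] must match the right-hand-side frame dimensions. checkRange is a hypothetical helper that just reproduces the guard so the thrown DMLRuntimeException can be observed:

import org.apache.sysml.runtime.DMLRuntimeException;

public class LeftIndexRangeCheckSketch {
    static void checkRange(long rl, long ru, long cl, long cu, long rhsRows, long rhsCols) {
        if (!(ru - rl + 1 == rhsRows && cu - cl + 1 == rhsCols))
            throw new DMLRuntimeException("Invalid index range of leftindexing: [" + rl + ":" + ru + "," + cl + ":" + cu + "] vs [" + rhsRows + "x" + rhsCols + "].");
    }

    public static void main(String[] args) {
        checkRange(1, 10, 1, 2, 10, 2); // ok: a 10x2 rhs fills the 10x2 target range
        try {
            checkRange(1, 10, 1, 2, 5, 2); // mismatch: 5x2 rhs vs a 10x2 range
        } catch (DMLRuntimeException e) {
            System.err.println("Caught: " + e.getMessage());
        }
    }
}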

Example 50 with DMLRuntimeException

use of org.apache.sysml.runtime.DMLRuntimeException in project incubator-systemml by apache.

the class MapmmChainSPInstruction method parseInstruction.

public static MapmmChainSPInstruction parseInstruction(String str) {
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
    InstructionUtils.checkNumFields(parts, 4, 5);
    String opcode = parts[0];
    // check supported opcode
    if (!opcode.equalsIgnoreCase(MapMultChain.OPCODE)) {
        throw new DMLRuntimeException("MapmmChainSPInstruction.parseInstruction():: Unknown opcode " + opcode);
    }
    // parse instruction parts (without exec type)
    CPOperand in1 = new CPOperand(parts[1]);
    CPOperand in2 = new CPOperand(parts[2]);
    if (parts.length == 5) {
        CPOperand out = new CPOperand(parts[3]);
        ChainType type = ChainType.valueOf(parts[4]);
        return new MapmmChainSPInstruction(null, in1, in2, out, type, opcode, str);
    } else { // parts.length == 6
        CPOperand in3 = new CPOperand(parts[3]);
        CPOperand out = new CPOperand(parts[4]);
        ChainType type = ChainType.valueOf(parts[5]);
        return new MapmmChainSPInstruction(null, in1, in2, in3, out, type, opcode, str);
    }
}
Also used : ChainType(org.apache.sysml.lops.MapMultChain.ChainType) CPOperand(org.apache.sysml.runtime.instructions.cp.CPOperand) DMLRuntimeException(org.apache.sysml.runtime.DMLRuntimeException)
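
A standalone sketch of the parse-time validation pattern above: unknown opcodes are rejected with a DMLRuntimeException rather than being misparsed. checkOpcode is a hypothetical helper, and "mapmmchain" is used as the expected opcode literal on the assumption that it matches MapMultChain.OPCODE:

import org.apache.sysml.runtime.DMLRuntimeException;

public class OpcodeCheckSketch {
    static void checkOpcode(String opcode, String expected) {
        if (!opcode.equalsIgnoreCase(expected))
            throw new DMLRuntimeException("parseInstruction():: Unknown opcode " + opcode);
    }

    public static void main(String[] args) {
        checkOpcode("mapmmchain", "mapmmchain"); // accepted
        try {
            checkOpcode("mapmm", "mapmmchain"); // rejected
        } catch (DMLRuntimeException e) {
            System.err.println("Caught: " + e.getMessage());
        }
    }
}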

Aggregations

DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException) 579
MatrixBlock (org.apache.sysml.runtime.matrix.data.MatrixBlock) 104
IOException (java.io.IOException) 102
MatrixCharacteristics (org.apache.sysml.runtime.matrix.MatrixCharacteristics) 85
MatrixObject (org.apache.sysml.runtime.controlprogram.caching.MatrixObject) 78
ArrayList (java.util.ArrayList) 75
CPOperand (org.apache.sysml.runtime.instructions.cp.CPOperand) 49
Path (org.apache.hadoop.fs.Path) 43
MatrixIndexes (org.apache.sysml.runtime.matrix.data.MatrixIndexes) 40
ExecutorService (java.util.concurrent.ExecutorService) 38
Pointer (jcuda.Pointer) 37
Future (java.util.concurrent.Future) 35
CSRPointer (org.apache.sysml.runtime.instructions.gpu.context.CSRPointer) 30
MetaDataFormat (org.apache.sysml.runtime.matrix.MetaDataFormat) 26
FrameBlock (org.apache.sysml.runtime.matrix.data.FrameBlock) 26
FileSystem (org.apache.hadoop.fs.FileSystem) 25
JobConf (org.apache.hadoop.mapred.JobConf) 23
Operator (org.apache.sysml.runtime.matrix.operators.Operator) 22
KahanObject (org.apache.sysml.runtime.instructions.cp.KahanObject) 20
SparkExecutionContext (org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext) 19