Use of org.apache.sysml.parser.Expression.ValueType in project incubator-systemml by apache.
Class ProgramConverter, method parseDataIdentifier:
private static DataIdentifier parseDataIdentifier(String in) {
    StringTokenizer st = new StringTokenizer(in, DATA_FIELD_DELIM);
    String name = st.nextToken();
    DataType dt = DataType.valueOf(st.nextToken());
    ValueType vt = ValueType.valueOf(st.nextToken());
    DataIdentifier dat = new DataIdentifier(name);
    dat.setDataType(dt);
    dat.setValueType(vt);
    return dat;
}
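For context, parseDataIdentifier expects three delimiter-separated fields in a fixed order: identifier name, DataType, and ValueType. Below is a minimal, self-contained sketch of that layout; the delimiter literal is a stand-in for the DATA_FIELD_DELIM constant in ProgramConverter, and the class is illustrative, not project code.

import java.util.StringTokenizer;

public class DataIdentifierFormatSketch {
    // Hypothetical delimiter; the real value is ProgramConverter.DATA_FIELD_DELIM.
    private static final String DELIM = "|";

    public static void main(String[] args) {
        // Serialized form consumed by parseDataIdentifier: <name><delim><DataType><delim><ValueType>
        String serialized = String.join(DELIM, "X", "MATRIX", "DOUBLE");
        StringTokenizer st = new StringTokenizer(serialized, DELIM);
        System.out.println("name=" + st.nextToken()
            + ", dataType=" + st.nextToken()
            + ", valueType=" + st.nextToken());
    }
}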
Use of org.apache.sysml.parser.Expression.ValueType in project incubator-systemml by apache.
Class ResultMergeLocalMemory, method createNewMatrixObject:
private MatrixObject createNewMatrixObject(MatrixBlock data) {
    ValueType vt = _output.getValueType();
    MetaDataFormat metadata = (MetaDataFormat) _output.getMetaData();
    MatrixObject moNew = new MatrixObject(vt, _outputFName);
    // create deep copy of metadata obj
    MatrixCharacteristics mcOld = metadata.getMatrixCharacteristics();
    OutputInfo oiOld = metadata.getOutputInfo();
    InputInfo iiOld = metadata.getInputInfo();
    MatrixCharacteristics mc = new MatrixCharacteristics(mcOld.getRows(), mcOld.getCols(), mcOld.getRowsPerBlock(), mcOld.getColsPerBlock());
    mc.setNonZeros(data.getNonZeros());
    MetaDataFormat meta = new MetaDataFormat(mc, oiOld, iiOld);
    moNew.setMetaData(meta);
    // adjust dense/sparse representation
    data.examSparsity();
    // set the merged data and release the new output
    moNew.acquireModify(data);
    moNew.release();
    return moNew;
}
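The important detail above is that the matrix characteristics are deep-copied before the non-zero count is updated, so the metadata of the source output object is never mutated. A self-contained sketch of that copy-then-adjust pattern, using a hypothetical Characteristics class rather than the project's MatrixCharacteristics API:

public class MetadataCopySketch {
    // Hypothetical stand-in for MatrixCharacteristics.
    static final class Characteristics {
        long rows, cols, rowsPerBlock, colsPerBlock, nonZeros;
        Characteristics(long r, long c, long rpb, long cpb) {
            rows = r; cols = c; rowsPerBlock = rpb; colsPerBlock = cpb;
        }
    }

    public static void main(String[] args) {
        Characteristics old = new Characteristics(1000, 1000, 1000, 1000);
        old.nonZeros = 500_000;
        // copy dimensions and blocking, then set the merged result's nnz only on the copy
        Characteristics merged = new Characteristics(old.rows, old.cols, old.rowsPerBlock, old.colsPerBlock);
        merged.nonZeros = 750_000;
        System.out.println("old nnz=" + old.nonZeros + ", merged nnz=" + merged.nonZeros);
    }
}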
Use of org.apache.sysml.parser.Expression.ValueType in project incubator-systemml by apache.
Class VariableCPInstruction, method parseInstruction:
public static VariableCPInstruction parseInstruction(String str) {
    String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
    String opcode = parts[0];
    VariableOperationCode voc = getVariableOperationCode(opcode);
    if (voc == VariableOperationCode.CreateVariable) {
        if (parts.length < 5) // && parts.length != 10
            throw new DMLRuntimeException("Invalid number of operands in createvar instruction: " + str);
    } else if (voc == VariableOperationCode.MoveVariable) {
        // mvvar tempA A; or mvvar mvar5 "data/out.mtx" "binary"
        if (parts.length != 3 && parts.length != 4)
            throw new DMLRuntimeException("Invalid number of operands in mvvar instruction: " + str);
    } else if (voc == VariableOperationCode.Write) {
        // Write instructions for csv files also include three additional parameters (hasHeader, delimiter, sparse)
        if (parts.length != 5 && parts.length != 8)
            throw new DMLRuntimeException("Invalid number of operands in write instruction: " + str);
    } else if (voc != VariableOperationCode.RemoveVariable) {
        InstructionUtils.checkNumFields(parts, getArity(voc)); // no output
    }
    CPOperand in1 = null, in2 = null, in3 = null, in4 = null, out = null;
    switch (voc) {
        case CreateVariable:
            // variable name
            DataType dt = DataType.valueOf(parts[4]);
            ValueType vt = dt == DataType.MATRIX ? ValueType.DOUBLE : ValueType.STRING;
            int extSchema = (dt == DataType.FRAME && parts.length >= 13) ? 1 : 0;
            in1 = new CPOperand(parts[1], vt, dt);
            // file name
            in2 = new CPOperand(parts[2], ValueType.STRING, DataType.SCALAR);
            // file name override flag (always literal)
            in3 = new CPOperand(parts[3], ValueType.BOOLEAN, DataType.SCALAR);
            // format
            String fmt = parts[5];
            if (fmt.equalsIgnoreCase("csv")) {
                // 14 inputs: createvar corresponding to READ -- includes properties hasHeader, delim, fill, and fillValue
                if (parts.length < 15 + extSchema || parts.length > 17 + extSchema)
                    throw new DMLRuntimeException("Invalid number of operands in createvar instruction: " + str);
            } else {
                if (parts.length != 6 && parts.length != 12 + extSchema)
                    throw new DMLRuntimeException("Invalid number of operands in createvar instruction: " + str);
            }
            OutputInfo oi = OutputInfo.stringToOutputInfo(fmt);
            InputInfo ii = OutputInfo.getMatchingInputInfo(oi);
            MatrixCharacteristics mc = new MatrixCharacteristics();
            if (parts.length == 6) {
                // do nothing
            } else if (parts.length >= 11) {
                // matrix characteristics
                mc.setDimension(Long.parseLong(parts[6]), Long.parseLong(parts[7]));
                mc.setBlockSize(Integer.parseInt(parts[8]), Integer.parseInt(parts[9]));
                mc.setNonZeros(Long.parseLong(parts[10]));
            } else {
                throw new DMLRuntimeException("Invalid number of operands in createvar instruction: " + str);
            }
            MetaDataFormat iimd = new MetaDataFormat(mc, oi, ii);
            UpdateType updateType = UpdateType.COPY;
            if (parts.length >= 12)
                updateType = UpdateType.valueOf(parts[11].toUpperCase());
            // handle frame schema
            String schema = (dt == DataType.FRAME && parts.length >= 13) ? parts[parts.length - 1] : null;
            if (fmt.equalsIgnoreCase("csv")) {
                // Createvar instructions for CSV format have either 13 or 14 inputs.
                // 13 inputs: createvar corresponding to WRITE -- includes properties hasHeader, delim, and sparse
                // 14 inputs: createvar corresponding to READ -- includes properties hasHeader, delim, fill, and fillValue
                FileFormatProperties fmtProperties = null;
                if (parts.length == 15 + extSchema) {
                    boolean hasHeader = Boolean.parseBoolean(parts[12]);
                    String delim = parts[13];
                    boolean sparse = Boolean.parseBoolean(parts[14]);
                    fmtProperties = new CSVFileFormatProperties(hasHeader, delim, sparse);
                } else {
                    boolean hasHeader = Boolean.parseBoolean(parts[12]);
                    String delim = parts[13];
                    boolean fill = Boolean.parseBoolean(parts[14]);
                    double fillValue = UtilFunctions.parseToDouble(parts[15]);
                    String naStrings = null;
                    if (parts.length == 17 + extSchema)
                        naStrings = parts[16];
                    fmtProperties = new CSVFileFormatProperties(hasHeader, delim, fill, fillValue, naStrings);
                }
                return new VariableCPInstruction(VariableOperationCode.CreateVariable, in1, in2, in3, iimd, updateType, fmtProperties, schema, opcode, str);
            } else {
                return new VariableCPInstruction(VariableOperationCode.CreateVariable, in1, in2, in3, iimd, updateType, schema, opcode, str);
            }
        case AssignVariable:
            in1 = new CPOperand(parts[1]);
            in2 = new CPOperand(parts[2]);
            break;
        case CopyVariable:
            // Value types are not given here
            in1 = new CPOperand(parts[1], ValueType.UNKNOWN, DataType.UNKNOWN);
            in2 = new CPOperand(parts[2], ValueType.UNKNOWN, DataType.UNKNOWN);
            break;
        case MoveVariable:
            in1 = new CPOperand(parts[1], ValueType.UNKNOWN, DataType.UNKNOWN);
            in2 = new CPOperand(parts[2], ValueType.UNKNOWN, DataType.UNKNOWN);
            if (parts.length > 3)
                in3 = new CPOperand(parts[3], ValueType.UNKNOWN, DataType.UNKNOWN);
            break;
        case RemoveVariable:
            VariableCPInstruction rminst = new VariableCPInstruction(getVariableOperationCode(opcode), null, null, null, out, opcode, str);
            for (int i = 1; i < parts.length; i++)
                rminst.addInput(new CPOperand(parts[i], ValueType.UNKNOWN, DataType.SCALAR));
            return rminst;
        case RemoveVariableAndFile:
            in1 = new CPOperand(parts[1]);
            in2 = new CPOperand(parts[2]);
            // second argument must be a boolean
            if (in2.getValueType() != ValueType.BOOLEAN)
                throw new DMLRuntimeException("Unexpected value type for second argument in: " + str);
            break;
        case CastAsScalarVariable:
        case CastAsMatrixVariable:
        case CastAsFrameVariable:
        case CastAsDoubleVariable:
        case CastAsIntegerVariable:
        case CastAsBooleanVariable:
            // first operand is a variable name => string value type
            in1 = new CPOperand(parts[1]);
            // output variable name
            out = new CPOperand(parts[2]);
            break;
        case Write:
            in1 = new CPOperand(parts[1]);
            in2 = new CPOperand(parts[2]);
            in3 = new CPOperand(parts[3]);
            FileFormatProperties fprops = null;
            if (in3.getName().equalsIgnoreCase("csv")) {
                boolean hasHeader = Boolean.parseBoolean(parts[4]);
                String delim = parts[5];
                boolean sparse = Boolean.parseBoolean(parts[6]);
                fprops = new CSVFileFormatProperties(hasHeader, delim, sparse);
                // description
                in4 = new CPOperand(parts[7]);
            } else {
                fprops = new FileFormatProperties();
                // description
                in4 = new CPOperand(parts[4]);
            }
            VariableCPInstruction inst = new VariableCPInstruction(getVariableOperationCode(opcode), in1, in2, in3, out, null, fprops, null, null, opcode, str);
            inst.addInput(in4);
            return inst;
        case Read:
            in1 = new CPOperand(parts[1]);
            in2 = new CPOperand(parts[2]);
            out = null;
            break;
        case SetFileName:
            // variable name
            in1 = new CPOperand(parts[1]);
            // file name
            in2 = new CPOperand(parts[2], ValueType.UNKNOWN, DataType.UNKNOWN);
            // option: remote or local
            in3 = new CPOperand(parts[3], ValueType.UNKNOWN, DataType.UNKNOWN);
            // return new VariableCPInstruction(getVariableOperationCode(opcode), in1, in2, in3, str);
            break;
    }
    return new VariableCPInstruction(getVariableOperationCode(opcode), in1, in2, in3, out, opcode, str);
}
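To make the positional parsing in the CreateVariable case easier to follow, here is an illustrative operand layout for a non-csv createvar instruction. The values and the plain String[] are stand-ins; the real instruction is a single delimiter-separated string that InstructionUtils splits into parts.

public class CreatevarLayoutSketch {
    public static void main(String[] args) {
        // Hypothetical operands in the positional order read above.
        String[] parts = {
            "createvar",      // parts[0]: opcode
            "_mVar1",         // parts[1]: variable name
            "scratch/_mVar1", // parts[2]: file name
            "false",          // parts[3]: file name override flag
            "MATRIX",         // parts[4]: data type
            "binaryblock",    // parts[5]: format
            "1000", "1000",   // parts[6..7]: rows, cols
            "1000", "1000",   // parts[8..9]: rows/cols per block
            "-1",             // parts[10]: number of non-zeros (-1 if unknown)
            "COPY"            // parts[11]: update type
        };
        for (int i = 0; i < parts.length; i++)
            System.out.println("parts[" + i + "] = " + parts[i]);
    }
}

With 12 fields and no frame schema (extSchema = 0), this layout satisfies the parts.length != 6 && parts.length != 12 + extSchema check in the non-csv branch.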
Use of org.apache.sysml.parser.Expression.ValueType in project incubator-systemml by apache.
Class WriteSPInstruction, method processInstruction:
@Override
public void processInstruction(ExecutionContext ec) {
    SparkExecutionContext sec = (SparkExecutionContext) ec;
    // get filename (literal or variable expression)
    String fname = ec.getScalarInput(input2.getName(), ValueType.STRING, input2.isLiteral()).getStringValue();
    String desc = ec.getScalarInput(input4.getName(), ValueType.STRING, input4.isLiteral()).getStringValue();
    formatProperties.setDescription(desc);
    ValueType[] schema = (input1.getDataType() == DataType.FRAME) ? sec.getFrameObject(input1.getName()).getSchema() : null;
    try {
        // if the file already exists on HDFS, remove it.
        MapReduceTool.deleteFileIfExistOnHDFS(fname);
        // prepare output info according to meta data
        String outFmt = input3.getName();
        OutputInfo oi = OutputInfo.stringToOutputInfo(outFmt);
        // core matrix/frame write
        if (input1.getDataType() == DataType.MATRIX)
            processMatrixWriteInstruction(sec, fname, oi);
        else
            processFrameWriteInstruction(sec, fname, oi, schema);
    } catch (IOException ex) {
        throw new DMLRuntimeException("Failed to process write instruction", ex);
    }
}
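MapReduceTool.deleteFileIfExistOnHDFS clears any previous output before writing. As a hedged sketch, a delete-if-exists helper built directly on the Hadoop FileSystem API looks roughly like this; the project's actual implementation may differ in details.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteIfExistsSketch {
    // Roughly what a delete-if-exists helper does with the plain Hadoop API.
    public static void deleteIfExists(String fname) throws IOException {
        Configuration conf = new Configuration();
        Path path = new Path(fname);
        FileSystem fs = path.getFileSystem(conf);
        if (fs.exists(path))
            fs.delete(path, true); // recursive, since outputs may be directories of part files
    }
}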
Use of org.apache.sysml.parser.Expression.ValueType in project incubator-systemml by apache.
Class FrameBlock, method readFields:
@Override
public void readFields(DataInput in) throws IOException {
    // read head (rows, cols)
    _numRows = in.readInt();
    int numCols = in.readInt();
    boolean isDefaultMeta = in.readBoolean();
    // allocate schema/meta data arrays
    _schema = (_schema != null && _schema.length == numCols) ? _schema : new ValueType[numCols];
    _colnames = (_colnames != null && _colnames.length == numCols) ? _colnames : new String[numCols];
    _colmeta = (_colmeta != null && _colmeta.length == numCols) ? _colmeta : new ColumnMetadata[numCols];
    _coldata = (_coldata != null && _coldata.length == numCols) ? _coldata : new Array[numCols];
    // read columns (value type, meta, data)
    for (int j = 0; j < numCols; j++) {
        ValueType vt = ValueType.values()[in.readByte()];
        String name = isDefaultMeta ? createColName(j) : in.readUTF();
        long ndistinct = isDefaultMeta ? 0 : in.readLong();
        String mvvalue = isDefaultMeta ? null : in.readUTF();
        Array arr = null;
        switch (vt) {
            case STRING:
                arr = new StringArray(new String[_numRows]);
                break;
            case BOOLEAN:
                arr = new BooleanArray(new boolean[_numRows]);
                break;
            case INT:
                arr = new LongArray(new long[_numRows]);
                break;
            case DOUBLE:
                arr = new DoubleArray(new double[_numRows]);
                break;
            default:
                throw new IOException("Unsupported value type: " + vt);
        }
        arr.readFields(in);
        _schema[j] = vt;
        _colnames[j] = name;
        _colmeta[j] = new ColumnMetadata(ndistinct, (mvvalue == null || mvvalue.isEmpty()) ? null : mvvalue);
        _coldata[j] = arr;
    }
}
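readFields assumes a fixed serialization layout: a row/column header, a default-metadata flag, then per column a ValueType byte, optional name and column metadata, and finally the column data. The writer sketch below mirrors that layout for reference; field names and the method signature are illustrative, not the project's actual FrameBlock.write.

import java.io.DataOutput;
import java.io.IOException;

class FrameBlockLayoutSketch {
    void write(DataOutput out, int rows, int cols, boolean isDefaultMeta,
            byte[] colTypes, String[] colnames, long[] ndistinct, String[] mvvalues) throws IOException {
        out.writeInt(rows);              // _numRows
        out.writeInt(cols);              // numCols
        out.writeBoolean(isDefaultMeta); // default meta data flag
        for (int j = 0; j < cols; j++) {
            out.writeByte(colTypes[j]);  // ValueType ordinal
            if (!isDefaultMeta) {
                out.writeUTF(colnames[j]);   // column name
                out.writeLong(ndistinct[j]); // number of distinct values
                out.writeUTF(mvvalues[j]);   // missing-value replacement string
            }
            // followed by the column data itself (arr.write(out) in the real code)
        }
    }
}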