Use of org.apache.sysml.api.mlcontext.MatrixMetadata in project systemml by apache.
From the class MLContextTest, method testJavaRDDIJVSumDML:
@Test
public void testJavaRDDIJVSumDML() {
	System.out.println("MLContextTest - JavaRDD<String> IJV sum DML");
	List<String> list = new ArrayList<String>();
	list.add("1 1 5");
	list.add("2 2 5");
	list.add("3 3 5");
	JavaRDD<String> javaRDD = sc.parallelize(list);
	MatrixMetadata mm = new MatrixMetadata(MatrixFormat.IJV, 3, 3);
	Script script = dml("print('sum: ' + sum(M));").in("M", javaRDD, mm);
	setExpectedStdOut("sum: 15.0");
	ml.execute(script);
}
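For comparison, here is a minimal sketch (not an actual test from this class) that binds the aggregate as a script output instead of printing it. It assumes the same test fixtures (sc, ml, the static dml import, a JUnit Assert import) and that the standard Script.out and MLResults.getDouble methods are available.

@Test
public void testJavaRDDIJVSumOutputSketch() {
	// Hypothetical variant: read the sum back via the results object instead of stdout.
	List<String> list = new ArrayList<String>();
	list.add("1 1 5");
	list.add("2 2 5");
	list.add("3 3 5");
	JavaRDD<String> javaRDD = sc.parallelize(list);
	MatrixMetadata mm = new MatrixMetadata(MatrixFormat.IJV, 3, 3);
	Script script = dml("total = sum(M);").in("M", javaRDD, mm).out("total");
	MLResults results = ml.execute(script);
	Assert.assertEquals(15.0, results.getDouble("total"), 0.0);
}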
Use of org.apache.sysml.api.mlcontext.MatrixMetadata in project systemml by apache.
From the class MLContextTest, method testDataFrameSumPYDMLMllibVectorWithNoIDColumn:
@Test
public void testDataFrameSumPYDMLMllibVectorWithNoIDColumn() {
	System.out.println("MLContextTest - DataFrame sum PYDML, mllib vector with no ID column");
	List<org.apache.spark.mllib.linalg.Vector> list = new ArrayList<org.apache.spark.mllib.linalg.Vector>();
	list.add(org.apache.spark.mllib.linalg.Vectors.dense(1.0, 2.0, 3.0));
	list.add(org.apache.spark.mllib.linalg.Vectors.dense(4.0, 5.0, 6.0));
	list.add(org.apache.spark.mllib.linalg.Vectors.dense(7.0, 8.0, 9.0));
	JavaRDD<org.apache.spark.mllib.linalg.Vector> javaRddVector = sc.parallelize(list);
	JavaRDD<Row> javaRddRow = javaRddVector.map(new MllibVectorRow());
	List<StructField> fields = new ArrayList<StructField>();
	fields.add(DataTypes.createStructField("C1", new org.apache.spark.mllib.linalg.VectorUDT(), true));
	StructType schema = DataTypes.createStructType(fields);
	Dataset<Row> dataFrame = spark.createDataFrame(javaRddRow, schema);
	MatrixMetadata mm = new MatrixMetadata(MatrixFormat.DF_VECTOR);
	Script script = pydml("print('sum: ' + sum(M))").in("M", dataFrame, mm);
	setExpectedStdOut("sum: 45.0");
	ml.execute(script);
}
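A related sketch covers the vector DataFrame with an explicit ID column. It assumes that MatrixFormat.DF_VECTOR_WITH_INDEX exists alongside DF_VECTOR and that RDDConverterUtils.DF_ID_COLUMN (used later in this section for the doubles case) also applies to vector DataFrames; RowFactory is assumed imported from org.apache.spark.sql.

@Test
public void testDataFrameSumMllibVectorWithIDColumnSketch() {
	// Hypothetical variant: one mllib vector per row plus a leading double ID column.
	List<Row> rows = new ArrayList<Row>();
	rows.add(RowFactory.create(1.0, org.apache.spark.mllib.linalg.Vectors.dense(1.0, 2.0, 3.0)));
	rows.add(RowFactory.create(2.0, org.apache.spark.mllib.linalg.Vectors.dense(4.0, 5.0, 6.0)));
	rows.add(RowFactory.create(3.0, org.apache.spark.mllib.linalg.Vectors.dense(7.0, 8.0, 9.0)));
	List<StructField> fields = new ArrayList<StructField>();
	fields.add(DataTypes.createStructField(RDDConverterUtils.DF_ID_COLUMN, DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C1", new org.apache.spark.mllib.linalg.VectorUDT(), true));
	Dataset<Row> dataFrame = spark.createDataFrame(rows, DataTypes.createStructType(fields));
	MatrixMetadata mm = new MatrixMetadata(MatrixFormat.DF_VECTOR_WITH_INDEX);
	Script script = dml("print('sum: ' + sum(M));").in("M", dataFrame, mm);
	setExpectedStdOut("sum: 45.0");
	ml.execute(script);
}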
Use of org.apache.sysml.api.mlcontext.MatrixMetadata in project systemml by apache.
From the class MLContextTest, method testDataFrameSumPYDMLDoublesWithNoIDColumn:
@Test
public void testDataFrameSumPYDMLDoublesWithNoIDColumn() {
	System.out.println("MLContextTest - DataFrame sum PYDML, doubles with no ID column");
	List<String> list = new ArrayList<String>();
	list.add("10,20,30");
	list.add("40,50,60");
	list.add("70,80,90");
	JavaRDD<String> javaRddString = sc.parallelize(list);
	JavaRDD<Row> javaRddRow = javaRddString.map(new CommaSeparatedValueStringToDoubleArrayRow());
	List<StructField> fields = new ArrayList<StructField>();
	fields.add(DataTypes.createStructField("C1", DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C2", DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C3", DataTypes.DoubleType, true));
	StructType schema = DataTypes.createStructType(fields);
	Dataset<Row> dataFrame = spark.createDataFrame(javaRddRow, schema);
	MatrixMetadata mm = new MatrixMetadata(MatrixFormat.DF_DOUBLES);
	Script script = pydml("print('sum: ' + sum(M))").in("M", dataFrame, mm);
	setExpectedStdOut("sum: 450.0");
	ml.execute(script);
}
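A minimal sketch of the same DataFrame input passed without any MatrixMetadata follows. It rests on the assumption that the converter can infer the format from the DataFrame schema when no metadata is supplied; that inference is not demonstrated in this section, so treat it as an assumption.

@Test
public void testDataFrameSumDoublesNoMetadataSketch() {
	// Hypothetical variant: no MatrixMetadata argument, format assumed to be inferred.
	List<String> list = new ArrayList<String>();
	list.add("10,20,30");
	list.add("40,50,60");
	list.add("70,80,90");
	JavaRDD<Row> javaRddRow = sc.parallelize(list).map(new CommaSeparatedValueStringToDoubleArrayRow());
	List<StructField> fields = new ArrayList<StructField>();
	fields.add(DataTypes.createStructField("C1", DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C2", DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C3", DataTypes.DoubleType, true));
	Dataset<Row> dataFrame = spark.createDataFrame(javaRddRow, DataTypes.createStructType(fields));
	Script script = dml("print('sum: ' + sum(M));").in("M", dataFrame);
	setExpectedStdOut("sum: 450.0");
	ml.execute(script);
}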
Use of org.apache.sysml.api.mlcontext.MatrixMetadata in project systemml by apache.
From the class MLContextTest, method testRDDGoodMetadataDML:
@Test
public void testRDDGoodMetadataDML() {
	System.out.println("MLContextTest - RDD<String> good metadata DML");
	List<String> list = new ArrayList<String>();
	list.add("1,1,1");
	list.add("2,2,2");
	list.add("3,3,3");
	JavaRDD<String> javaRDD = sc.parallelize(list);
	RDD<String> rdd = JavaRDD.toRDD(javaRDD);
	MatrixMetadata mm = new MatrixMetadata(3, 3, 9);
	Script script = dml("print('sum: ' + sum(M));").in("M", rdd, mm);
	setExpectedStdOut("sum: 18.0");
	ml.execute(script);
}
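The metadata above gives only dimensions and non-zero count. A minimal sketch of the same input with the format stated explicitly is shown next; it assumes MatrixFormat.CSV exists in the same enum as the IJV and DF_* constants used elsewhere in this section, and reuses the (format, rows, columns) MatrixMetadata constructor already shown for IJV.

@Test
public void testRDDCsvMetadataSketch() {
	// Hypothetical variant: comma-separated rows described explicitly as CSV.
	List<String> list = new ArrayList<String>();
	list.add("1,1,1");
	list.add("2,2,2");
	list.add("3,3,3");
	JavaRDD<String> javaRDD = sc.parallelize(list);
	MatrixMetadata mm = new MatrixMetadata(MatrixFormat.CSV, 3, 3);
	Script script = dml("print('sum: ' + sum(M));").in("M", javaRDD, mm);
	setExpectedStdOut("sum: 18.0");
	ml.execute(script);
}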
Use of org.apache.sysml.api.mlcontext.MatrixMetadata in project systemml by apache.
From the class MLContextTest, method testDataFrameSumPYDMLDoublesWithIDColumn:
@Test
public void testDataFrameSumPYDMLDoublesWithIDColumn() {
	System.out.println("MLContextTest - DataFrame sum PYDML, doubles with ID column");
	List<String> list = new ArrayList<String>();
	list.add("1,1,2,3");
	list.add("2,4,5,6");
	list.add("3,7,8,9");
	JavaRDD<String> javaRddString = sc.parallelize(list);
	JavaRDD<Row> javaRddRow = javaRddString.map(new CommaSeparatedValueStringToDoubleArrayRow());
	List<StructField> fields = new ArrayList<StructField>();
	fields.add(DataTypes.createStructField(RDDConverterUtils.DF_ID_COLUMN, DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C1", DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C2", DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C3", DataTypes.DoubleType, true));
	StructType schema = DataTypes.createStructType(fields);
	Dataset<Row> dataFrame = spark.createDataFrame(javaRddRow, schema);
	MatrixMetadata mm = new MatrixMetadata(MatrixFormat.DF_DOUBLES_WITH_INDEX);
	Script script = pydml("print('sum: ' + sum(M))").in("M", dataFrame, mm);
	setExpectedStdOut("sum: 45.0");
	ml.execute(script);
}
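All of the tests above only print a scalar. As a closing sketch, the matrix itself can presumably be returned and read back as a DataFrame; this is a hypothetical example that assumes MLResults exposes a getDataFrame(String) accessor for bound matrix outputs.

@Test
public void testDataFrameOutputSketch() {
	// Hypothetical variant: bind an output matrix and retrieve it from the results.
	List<String> list = new ArrayList<String>();
	list.add("1,2,3");
	list.add("4,5,6");
	JavaRDD<Row> javaRddRow = sc.parallelize(list).map(new CommaSeparatedValueStringToDoubleArrayRow());
	List<StructField> fields = new ArrayList<StructField>();
	fields.add(DataTypes.createStructField("C1", DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C2", DataTypes.DoubleType, true));
	fields.add(DataTypes.createStructField("C3", DataTypes.DoubleType, true));
	Dataset<Row> dataFrame = spark.createDataFrame(javaRddRow, DataTypes.createStructType(fields));
	MatrixMetadata mm = new MatrixMetadata(MatrixFormat.DF_DOUBLES);
	Script script = dml("N = M * 2;").in("M", dataFrame, mm).out("N");
	MLResults results = ml.execute(script);
	Dataset<Row> doubled = results.getDataFrame("N"); // assumed accessor; values doubled
	doubled.show();
}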