Use of org.apache.spark.mllib.linalg.distributed.MatrixEntry in project incubator-systemml by apache.
The example below is the method testGNMFWithRDMLAndJava of the class GNMFTest.
@Test
public void testGNMFWithRDMLAndJava() throws IOException, DMLException, ParseException {
    System.out.println("------------ BEGIN " + TEST_NAME + " TEST {" + numRegisteredInputs + ", " + numRegisteredOutputs + "} ------------");
    this.scriptType = ScriptType.DML;

    int m = 2000;
    int n = 1500;
    int k = 50;
    int maxiter = 2;
    double Eps = Math.pow(10, -8);

    getAndLoadTestConfiguration(TEST_NAME);

    List<String> proArgs = new ArrayList<String>();
    proArgs.add(input("v"));
    proArgs.add(input("w"));
    proArgs.add(input("h"));
    proArgs.add(Integer.toString(maxiter));
    proArgs.add(output("w"));
    proArgs.add(output("h"));
    programArgs = proArgs.toArray(new String[proArgs.size()]);

    fullDMLScriptName = getScript();
    rCmd = getRCmd(inputDir(), Integer.toString(maxiter), expectedDir());

    double[][] v = getRandomMatrix(m, n, 1, 5, 0.2, System.currentTimeMillis());
    double[][] w = getRandomMatrix(m, k, 0, 1, 1, System.currentTimeMillis());
    double[][] h = getRandomMatrix(k, n, 0, 1, 1, System.currentTimeMillis());
    writeInputMatrixWithMTD("v", v, true);
    writeInputMatrixWithMTD("w", w, true);
    writeInputMatrixWithMTD("h", h, true);

    // Reference (in-memory Java) implementation of the GNMF multiplicative updates:
    //   H = H * (t(W) %*% V) / (t(W) %*% W %*% H + Eps)
    //   W = W * (V %*% t(H)) / (W %*% H %*% t(H) + Eps)
    for (int i = 0; i < maxiter; i++) {
        double[][] tW = TestUtils.performTranspose(w);
        double[][] tWV = TestUtils.performMatrixMultiplication(tW, v);
        double[][] tWW = TestUtils.performMatrixMultiplication(tW, w);
        double[][] tWWH = TestUtils.performMatrixMultiplication(tWW, h);
        for (int j = 0; j < k; j++) {
            for (int l = 0; l < n; l++) {
                h[j][l] = h[j][l] * (tWV[j][l] / (tWWH[j][l] + Eps));
            }
        }

        double[][] tH = TestUtils.performTranspose(h);
        double[][] vTH = TestUtils.performMatrixMultiplication(v, tH);
        double[][] hTH = TestUtils.performMatrixMultiplication(h, tH);
        double[][] wHTH = TestUtils.performMatrixMultiplication(w, hTH);
        for (int j = 0; j < m; j++) {
            for (int l = 0; l < k; l++) {
                w[j][l] = w[j][l] * (vTH[j][l] / (wHTH[j][l] + Eps));
            }
        }
    }

    boolean oldConfig = DMLScript.USE_LOCAL_SPARK_CONFIG;
    DMLScript.USE_LOCAL_SPARK_CONFIG = true;
    RUNTIME_PLATFORM oldRT = DMLScript.rtplatform;
    try {
        DMLScript.rtplatform = RUNTIME_PLATFORM.HYBRID_SPARK;

        Script script = ScriptFactory.dmlFromFile(fullDMLScriptName);
        // set positional argument values
        for (int argNum = 1; argNum <= proArgs.size(); argNum++) {
            script.in("$" + argNum, proArgs.get(argNum - 1));
        }

        // Read two matrices through RDD and one through HDFS
        if (numRegisteredInputs >= 1) {
            JavaRDD<String> vIn = sc.sc().textFile(input("v"), 2).toJavaRDD();
            MatrixMetadata mm = new MatrixMetadata(MatrixFormat.IJV, m, n);
            script.in("V", vIn, mm);
        }
        if (numRegisteredInputs >= 2) {
            JavaRDD<String> wIn = sc.sc().textFile(input("w"), 2).toJavaRDD();
            MatrixMetadata mm = new MatrixMetadata(MatrixFormat.IJV, m, k);
            script.in("W", wIn, mm);
        }
        if (numRegisteredInputs >= 3) {
            JavaRDD<String> hIn = sc.sc().textFile(input("h"), 2).toJavaRDD();
            MatrixMetadata mm = new MatrixMetadata(MatrixFormat.IJV, k, n);
            script.in("H", hIn, mm);
        }

        // Output one matrix to HDFS and get one as RDD
        if (numRegisteredOutputs >= 1) {
            script.out("H");
        }
        if (numRegisteredOutputs >= 2) {
            script.out("W");
            ml.setConfigProperty("cp.parallel.matrixmult", "false");
        }

        MLResults results = ml.execute(script);

        if (numRegisteredOutputs >= 2) {
            String configStr = ConfigurationManager.getDMLConfig().getConfigInfo();
            if (configStr.contains("cp.parallel.matrixmult: true"))
                Assert.fail("Configuration not updated via setConfig");
        }

        if (numRegisteredOutputs >= 1) {
            RDD<String> hOut = results.getRDDStringIJV("H");
            String fName = output("h");
            try {
                MapReduceTool.deleteFileIfExistOnHDFS(fName);
            } catch (IOException e) {
                throw new DMLRuntimeException("Error: While deleting file on HDFS");
            }
            hOut.saveAsTextFile(fName);
        }

        if (numRegisteredOutputs >= 2) {
            // Round-trip the W output through the Spark mllib types:
            // IJV text -> MatrixEntry -> CoordinateMatrix -> binary blocks -> text cells
            JavaRDD<String> javaRDDStringIJV = results.getJavaRDDStringIJV("W");
            JavaRDD<MatrixEntry> matRDD = javaRDDStringIJV.map(new StringToMatrixEntry());
            Matrix matrix = results.getMatrix("W");
            MatrixCharacteristics mcW = matrix.getMatrixMetadata().asMatrixCharacteristics();
            CoordinateMatrix coordinateMatrix = new CoordinateMatrix(matRDD.rdd(), mcW.getRows(), mcW.getCols());
            JavaPairRDD<MatrixIndexes, MatrixBlock> binaryRDD = RDDConverterUtilsExt.coordinateMatrixToBinaryBlock(sc, coordinateMatrix, mcW, true);
            JavaRDD<String> wOut = RDDConverterUtils.binaryBlockToTextCell(binaryRDD, mcW);

            String fName = output("w");
            try {
                MapReduceTool.deleteFileIfExistOnHDFS(fName);
            } catch (IOException e) {
                throw new DMLRuntimeException("Error: While deleting file on HDFS");
            }
            wOut.saveAsTextFile(fName);
        }

        runRScript(true);

        // compare matrices
        HashMap<CellIndex, Double> hmWDML = readDMLMatrixFromHDFS("w");
        HashMap<CellIndex, Double> hmHDML = readDMLMatrixFromHDFS("h");
        HashMap<CellIndex, Double> hmWR = readRMatrixFromFS("w");
        HashMap<CellIndex, Double> hmHR = readRMatrixFromFS("h");
        TestUtils.compareMatrices(hmWDML, hmWR, 0.000001, "hmWDML", "hmWR");
        TestUtils.compareMatrices(hmHDML, hmHR, 0.000001, "hmHDML", "hmHR");
    } finally {
        DMLScript.rtplatform = oldRT;
        DMLScript.USE_LOCAL_SPARK_CONFIG = oldConfig;
    }
}
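
The mapper StringToMatrixEntry used in the W output branch above is a helper of GNMFTest whose body is not reproduced on this page. A minimal sketch of what such a mapper could look like, assuming space-separated IJV text cells ("row col value" per line) as produced by getJavaRDDStringIJV; the parsing details are an assumption, not the project's actual implementation:

// Hypothetical sketch only; requires: import org.apache.spark.api.java.function.Function;
// Parses one space-separated IJV line into a Spark MatrixEntry.
private static class StringToMatrixEntry implements Function<String, MatrixEntry> {
    private static final long serialVersionUID = 1L;

    @Override
    public MatrixEntry call(String str) throws Exception {
        String[] fields = str.split(" ");
        long row = Long.parseLong(fields[0]);
        long col = Long.parseLong(fields[1]);
        double value = Double.parseDouble(fields[2]);
        return new MatrixEntry(row, col, value);
    }
}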
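
For readers unfamiliar with the Spark mllib types used in the test, here is a minimal, self-contained sketch of the MatrixEntry / CoordinateMatrix API on its own, independent of SystemML. The class name MatrixEntryExample, the local[2] master, and the sample values are illustrative only:

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix;
import org.apache.spark.mllib.linalg.distributed.MatrixEntry;

public class MatrixEntryExample {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("MatrixEntryExample").setMaster("local[2]");
        JavaSparkContext jsc = new JavaSparkContext(conf);

        // Three non-zero cells of a 2 x 3 matrix as (row, col, value) with 0-based indices.
        JavaRDD<MatrixEntry> entries = jsc.parallelize(Arrays.asList(
                new MatrixEntry(0, 0, 1.0),
                new MatrixEntry(0, 2, 2.5),
                new MatrixEntry(1, 1, 3.0)));

        // A CoordinateMatrix wraps the entry RDD and carries the matrix dimensions.
        CoordinateMatrix cm = new CoordinateMatrix(entries.rdd(), 2, 3);
        System.out.println("dims = " + cm.numRows() + " x " + cm.numCols()
                + ", nnz = " + cm.entries().count());

        jsc.stop();
    }
}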