Use of edu.cmu.tetrad.util.TetradVector in project tetrad by cmu-phil.
Class Ling, method removeZeroRowsAndCols.
private TetradMatrix removeZeroRowsAndCols(TetradMatrix w, List<Node> variables) {
    TetradMatrix _W = w.copy();
    List<Node> _variables = new ArrayList<>(variables);
    List<Integer> remove = new ArrayList<>();

    // Mark rows that are entirely zero.
    ROW:
    for (int i = 0; i < _W.rows(); i++) {
        TetradVector row = _W.getRow(i);

        for (int j = 0; j < row.size(); j++) {
            if (row.get(j) != 0) continue ROW;
        }

        remove.add(i);
    }

    // Mark columns that are entirely zero (the matrix is square, so rows()
    // also bounds the column index).
    COLUMN:
    for (int i = 0; i < _W.rows(); i++) {
        TetradVector col = _W.getColumn(i);

        for (int j = 0; j < col.size(); j++) {
            if (col.get(j) != 0) continue COLUMN;
        }

        if (!remove.contains(i)) {
            remove.add(i);
        }
    }

    // Keep the unmarked indices and drop the corresponding variables.
    int[] rows = new int[_W.rows() - remove.size()];
    int count = -1;

    for (int k = 0; k < w.rows(); k++) {
        if (remove.contains(k)) {
            variables.remove(_variables.get(k));
        } else {
            rows[++count] = k;
        }
    }

    return w.getSelection(rows, rows);
}
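The keep-or-drop rule can be illustrated without the Tetrad matrix classes. Below is a minimal standalone sketch (plain arrays, not part of the Tetrad API) that keeps an index only when both its row and its column contain a nonzero entry, mirroring the remove-if-all-zero logic above:

    import java.util.ArrayList;
    import java.util.List;

    public class ZeroPruneDemo {
        // Returns the indices i whose row AND column each contain a nonzero entry.
        static List<Integer> nonZeroIndices(double[][] m) {
            List<Integer> keep = new ArrayList<>();
            for (int i = 0; i < m.length; i++) {
                boolean rowNonZero = false;
                boolean colNonZero = false;
                for (int j = 0; j < m.length; j++) {
                    if (m[i][j] != 0) rowNonZero = true;
                    if (m[j][i] != 0) colNonZero = true;
                }
                if (rowNonZero && colNonZero) keep.add(i);
            }
            return keep;
        }

        public static void main(String[] args) {
            double[][] m = {
                    {1, 0, 2},
                    {0, 0, 0},
                    {3, 0, 4}
            };
            // Row 1 and column 1 are all zero, so index 1 is dropped.
            System.out.println(nonZeroIndices(m)); // [0, 2]
        }
    }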
Class Ling, method permuteRows.
private static TetradMatrix permuteRows(TetradMatrix mat, List<Integer> permutation) {
    TetradMatrix permutedMat = mat.like();

    // Row j of the input is written to row permutation.get(j) of the output.
    for (int j = 0; j < mat.rows(); j++) {
        TetradVector row = mat.getRow(j);
        permutedMat.assignRow(permutation.get(j), row);
    }

    return permutedMat;
}
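The permutation direction is easy to misread: row j of the input lands at position permutation.get(j) of the output, not the other way around. A small standalone sketch with plain arrays (not the Tetrad API) makes the direction explicit:

    import java.util.Arrays;
    import java.util.List;

    public class PermuteRowsDemo {
        // Row j of the input is copied to row permutation.get(j) of the output,
        // mirroring the assignRow call above.
        static double[][] permuteRows(double[][] mat, List<Integer> permutation) {
            double[][] out = new double[mat.length][];
            for (int j = 0; j < mat.length; j++) {
                out[permutation.get(j)] = mat[j].clone();
            }
            return out;
        }

        public static void main(String[] args) {
            double[][] mat = {{1, 1}, {2, 2}, {3, 3}};
            // Send row 0 to position 2, row 1 to position 0, row 2 to position 1.
            double[][] permuted = permuteRows(mat, Arrays.asList(2, 0, 1));
            System.out.println(Arrays.deepToString(permuted));
            // [[2.0, 2.0], [3.0, 3.0], [1.0, 1.0]]
        }
    }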
Class Ling, method makeDataSet.
private void makeDataSet(GraphWithParameters graphWP) {
    // Define the "Gaussian-squared" distribution used for the error terms.
    Distribution gp2 = new GaussianPower(2);

    // The coefficients of the error terms (here, all 1's).
    TetradVector errorCoefficients = getErrorCoeffsIdentity(graphWP.getGraph().getNumNodes());

    // Generate data from the SEM.
    TetradMatrix inVectors = simulateCyclic(graphWP, errorCoefficients, numSamples, gp2);

    // Reformat it for the data set (note the transpose).
    dataSet = ColtDataSet.makeContinuousData(graphWP.getGraph().getNodes(),
            new TetradMatrix(inVectors.transpose().toArray()));
}
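The only reshaping here is the transpose before the data set is built. A minimal standalone sketch of that step with plain arrays (the variables-in-rows orientation of the simulated matrix is an assumption inferred from the transpose call, not something stated in the source):

    public class TransposeDemo {
        // Assumed layout: one row per variable, one column per sample;
        // transposing gives the cases-by-variables table a data set expects.
        static double[][] transpose(double[][] m) {
            double[][] t = new double[m[0].length][m.length];
            for (int i = 0; i < m.length; i++) {
                for (int j = 0; j < m[0].length; j++) {
                    t[j][i] = m[i][j];
                }
            }
            return t;
        }

        public static void main(String[] args) {
            double[][] variablesBySamples = {{1, 2, 3}, {4, 5, 6}}; // 2 variables, 3 samples
            double[][] samplesByVariables = transpose(variablesBySamples);
            System.out.println(samplesByVariables.length + " x " + samplesByVariables[0].length); // 3 x 2
        }
    }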
Class LingamPattern, method getScore.
// =============================PRIVATE METHODS=========================//
private Score getScore(Graph dag, TetradMatrix data, List<Node> variables) {
    Regression regression = new RegressionDataset(data, variables);
    List<Node> nodes = dag.getNodes();
    double score = 0.0;
    double[] pValues = new double[nodes.size()];
    TetradMatrix residuals = new TetradMatrix(data.rows(), data.columns());

    for (int i = 0; i < nodes.size(); i++) {
        Node _target = nodes.get(i);
        List<Node> _regressors = dag.getParents(_target);
        Node target = getVariable(variables, _target.getName());
        List<Node> regressors = new ArrayList<>();

        for (Node _regressor : _regressors) {
            Node variable = getVariable(variables, _regressor.getName());
            regressors.add(variable);
        }

        // Regress each node on its parents in the DAG and keep the residuals.
        RegressionResult result = regression.regress(target, regressors);
        TetradVector residualsColumn = result.getResiduals();
        residuals.assignColumn(i, residualsColumn);

        // Standardize the residuals, then compare the mean absolute value with
        // sqrt(2 / pi), its expected value under a standard normal distribution.
        DoubleArrayList residualsArray = new DoubleArrayList(residualsColumn.toArray());
        double mean = Descriptive.mean(residualsArray);
        double std = Descriptive.standardDeviation(Descriptive.variance(residualsArray.size(),
                Descriptive.sum(residualsArray), Descriptive.sumOfSquares(residualsArray)));

        for (int i2 = 0; i2 < residualsArray.size(); i2++) {
            residualsArray.set(i2, (residualsArray.get(i2) - mean) / std);
            residualsArray.set(i2, Math.abs(residualsArray.get(i2)));
        }

        double _mean = Descriptive.mean(residualsArray);
        double diff = _mean - Math.sqrt(2.0 / Math.PI);
        score += diff * diff;
    }

    // Anderson-Darling p-values for normality of each residual column.
    for (int j = 0; j < residuals.columns(); j++) {
        double[] x = residuals.getColumn(j).toArray();
        double p = new AndersonDarlingTest(x).getP();
        pValues[j] = p;
    }

    return new Score(score, pValues);
}
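Each term of the score measures how far the standardized residuals are from Gaussian behavior, using the fact that E|Z| = sqrt(2/pi) for a standard normal Z. A standalone sketch of one such term (plain arrays; the population standard deviation is used here, whereas the original relies on Colt's Descriptive helpers):

    public class NonGaussianityTermDemo {
        // Standardize the residuals, take the mean absolute value, and square
        // its deviation from sqrt(2/pi).
        static double nonGaussianityTerm(double[] residuals) {
            int n = residuals.length;
            double mean = 0.0;
            double sumSq = 0.0;

            for (double r : residuals) {
                mean += r;
                sumSq += r * r;
            }

            mean /= n;
            double sd = Math.sqrt(sumSq / n - mean * mean);

            double meanAbs = 0.0;
            for (double r : residuals) {
                meanAbs += Math.abs((r - mean) / sd);
            }
            meanAbs /= n;

            double diff = meanAbs - Math.sqrt(2.0 / Math.PI);
            return diff * diff;
        }

        public static void main(String[] args) {
            double[] residuals = {0.3, -1.2, 0.7, 2.1, -0.5, -0.9};
            System.out.println(nonGaussianityTerm(residuals));
        }
    }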
Class MNLRLikelihood, method getLik.
public double getLik(int child_index, int[] parents) {
    double lik = 0;
    Node c = variables.get(child_index);
    List<ContinuousVariable> continuous_parents = new ArrayList<>();
    List<DiscreteVariable> discrete_parents = new ArrayList<>();

    // Split the parents into continuous and discrete sets.
    for (int p : parents) {
        Node parent = variables.get(p);

        if (parent instanceof ContinuousVariable) {
            continuous_parents.add((ContinuousVariable) parent);
        } else {
            discrete_parents.add((DiscreteVariable) parent);
        }
    }

    int p = continuous_parents.size();

    // Partition the rows by the joint configuration of the discrete parents.
    List<List<Integer>> cells = adTree.getCellLeaves(discrete_parents);

    int[] continuousCols = new int[p];

    for (int j = 0; j < p; j++) {
        continuousCols[j] = nodesHash.get(continuous_parents.get(j));
    }

    for (List<Integer> cell : cells) {
        int r = cell.size();

        if (r > 1) {
            // Per-cell mean and standard deviation of each continuous parent.
            double[] mean = new double[p];
            double[] var = new double[p];

            for (int i = 0; i < p; i++) {
                for (int j = 0; j < r; j++) {
                    mean[i] += continuousData[continuousCols[i]][cell.get(j)];
                    var[i] += Math.pow(continuousData[continuousCols[i]][cell.get(j)], 2);
                }

                mean[i] /= r;
                var[i] /= r;
                var[i] -= Math.pow(mean[i], 2);
                var[i] = Math.sqrt(var[i]);

                // Flag degenerate cells with a NaN standard deviation.
                if (Double.isNaN(var[i])) {
                    System.out.println(var[i]);
                }
            }

            int degree = fDegree;

            if (fDegree < 1) {
                degree = (int) Math.floor(Math.log(r));
            }

            // Design matrix: polynomial basis (degrees 1..degree) of the
            // standardized continuous parents, plus a final intercept column of 1's.
            TetradMatrix subset = new TetradMatrix(r, p * degree + 1);

            for (int i = 0; i < r; i++) {
                subset.set(i, p * degree, 1);

                for (int j = 0; j < p; j++) {
                    for (int d = 0; d < degree; d++) {
                        subset.set(i, p * d + j,
                                Math.pow((continuousData[continuousCols[j]][cell.get(i)] - mean[j]) / var[j], d + 1));
                    }
                }
            }

            if (c instanceof ContinuousVariable) {
                // Continuous child: score the cell with a multiple regression on the basis.
                TetradVector target = new TetradVector(r);

                for (int i = 0; i < r; i++) {
                    target.set(i, continuousData[child_index][cell.get(i)]);
                }

                lik += multipleRegression(target, subset);
            } else {
                // Discrete child: -1 everywhere with +1 for the observed category,
                // scored by multinomial logistic regression.
                TetradMatrix target = new TetradMatrix(r, ((DiscreteVariable) c).getNumCategories());

                for (int i = 0; i < r; i++) {
                    for (int j = 0; j < ((DiscreteVariable) c).getNumCategories(); j++) {
                        target.set(i, j, -1);
                    }

                    target.set(i, discreteData[child_index][cell.get(i)], 1);
                }

                lik += MultinomialLogisticRegression(target, subset);
            }
        }
    }

    return lik;
}
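The design-matrix layout is the easiest part to misread in the flattened code: column p * d + j holds the (d + 1)-th power of the standardized j-th continuous parent, and the final column is an intercept of 1's. A standalone sketch of just that step (plain arrays; the pre-standardized input z is an assumption standing in for continuousData after subtracting the per-cell mean and dividing by the per-cell standard deviation):

    public class PolynomialBasisDemo {
        // Builds an r x (p * degree + 1) matrix: column p * d + j holds
        // z[j][i]^(d + 1), and the last column is the intercept (all 1's),
        // mirroring the layout used in getLik.
        static double[][] designMatrix(double[][] z, int degree) {
            int p = z.length;        // standardized continuous parents
            int r = z[0].length;     // samples in the cell
            double[][] subset = new double[r][p * degree + 1];

            for (int i = 0; i < r; i++) {
                subset[i][p * degree] = 1; // intercept column
                for (int j = 0; j < p; j++) {
                    for (int d = 0; d < degree; d++) {
                        subset[i][p * d + j] = Math.pow(z[j][i], d + 1);
                    }
                }
            }
            return subset;
        }

        public static void main(String[] args) {
            // Two standardized parents, three samples, degree 2:
            // columns are [z1, z2, z1^2, z2^2, 1].
            double[][] z = {{0.5, -1.0, 1.5}, {2.0, 0.0, -0.5}};
            System.out.println(java.util.Arrays.deepToString(designMatrix(z, 2)));
        }
    }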