Example usage of org.apache.ignite.ml.math.Vector in the Apache Ignite project: method logNormalize of class AbstractVector.
/**
 * Builds a log-normalized copy of this vector: every element {@code x} is
 * replaced by {@code log1p(x) / (normLen * log(power))}.
 *
 * @param power Logarithm base; must be finite and strictly greater than {@code 1}.
 * @param normLen Normalized length contributing to the denominator.
 * @return New vector holding the log-normalized values; this vector is unchanged.
 */
private Vector logNormalize(double power, double normLen) {
    assert !(Double.isInfinite(power) || power <= 1.0);

    // Denominator is constant for the whole vector, so compute it once.
    double div = normLen * Math.log(power);

    Vector res = copy();

    for (Element e : res.all())
        e.set(Math.log1p(e.get()) / div);

    return res;
}
Example usage of org.apache.ignite.ml.math.Vector in the Apache Ignite project: method plus of class AbstractVector.
/** {@inheritDoc} */
@Override
public Vector plus(Vector vec) {
    // Dimensions must agree before the element-wise sum.
    checkCardinality(vec);

    // Work on a copy so that neither operand is mutated.
    return copy().map(vec, Functions.PLUS);
}
Example usage of org.apache.ignite.ml.math.Vector in the Apache Ignite project: method batchSupplier of class MLPGroupUpdateTrainerCacheInput.
/** {@inheritDoc} */
@Override
public IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier() {
    String cName = cache.getName();
    // Local copies prohibit 'this' from being captured into the serialization context of the lambda.
    int bs = batchSize;
    // Local copies prohibit 'this' from being captured into the serialization context of the lambda.
    Random r = rand;
    return () -> {
        Ignite ignite = Ignition.localIgnite();
        IgniteCache<Integer, LabeledVector<Vector, Vector>> cache = ignite.getOrCreateCache(cName);
        int total = cache.size();
        Affinity<Integer> affinity = ignite.affinity(cName);
        // NOTE(review): assumes cache keys are exactly the integers [0, total) — confirm against the loader.
        List<Integer> allKeys = IntStream.range(0, total).boxed().collect(Collectors.toList());
        List<Integer> keys = new ArrayList<>(affinity.mapKeysToNodes(allKeys).get(ignite.cluster().localNode()));
        int locKeysCnt = keys.size();
        // The batch can be smaller than requested when the local node holds fewer than 'bs' keys.
        int[] selected = Utils.selectKDistinct(locKeysCnt, Math.min(bs, locKeysCnt), r);
        // Fix: size the matrices by the number of actually selected samples. The original allocated
        // 'bs' columns, leaving trailing zero-filled columns whenever locKeysCnt < bs.
        int cols = selected.length;
        // Get dimensions of vectors in cache. We suppose that every feature vector has the
        // same dimension d1 and every label has the same dimension d2.
        LabeledVector<Vector, Vector> dimEntry = cache.get(keys.get(selected[0]));
        Matrix inputs = new DenseLocalOnHeapMatrix(dimEntry.features().size(), cols);
        Matrix groundTruth = new DenseLocalOnHeapMatrix(dimEntry.label().size(), cols);
        for (int i = 0; i < cols; i++) {
            LabeledVector<Vector, Vector> labeled = cache.get(keys.get(selected[i]));
            inputs.assignColumn(i, labeled.features());
            groundTruth.assignColumn(i, labeled.label());
        }
        return new IgniteBiTuple<>(inputs, groundTruth);
    };
}
Example usage of org.apache.ignite.ml.math.Vector in the Apache Ignite project: method calculateDistributedGradient of class GradientDescent.
/**
 * Calculates gradient based in distributed matrix using {@link SparseDistributedMatrixMapReducer}.
 *
 * @param data Distributed matrix holding inputs and ground truth.
 * @param weights Point to calculate gradient at.
 * @return Gradient averaged over all nodes that produced one; the zero vector if none did.
 */
private Vector calculateDistributedGradient(SparseDistributedMatrix data, Vector weights) {
    SparseDistributedMatrixMapReducer mapReducer = new SparseDistributedMatrixMapReducer(data);
    return mapReducer.mapReduce((matrix, args) -> {
        Matrix inputs = extractInputs(matrix);
        Vector groundTruth = extractGroundTruth(matrix);
        return lossGradient.compute(inputs, groundTruth, args);
    }, gradients -> {
        int cnt = 0;
        Vector resGradient = new DenseLocalOnHeapVector(data.columnSize());
        // Sum the per-node gradients, skipping nodes that produced none.
        for (Vector gradient : gradients) {
            if (gradient != null) {
                resGradient = resGradient.plus(gradient);
                cnt++;
            }
        }
        // Fix: guard against division by zero, which would fill the result with NaN/Infinity
        // when every node returned null; return the zero vector instead.
        return cnt == 0 ? resGradient : resGradient.divide(cnt);
    }, weights);
}
Example usage of org.apache.ignite.ml.math.Vector in the Apache Ignite project: method solve of class QRDSolver.
/**
 * Least squares solution of {@code A*X = B}; {@code returns X}.
 *
 * <p>Forms {@code Y = Q^T * B}, then solves the upper-triangular system
 * {@code R * X = Y} by back substitution, processing rows of {@code X}
 * from the last one upwards.</p>
 *
 * @param mtx A matrix with as many rows as {@code A} and any number of cols.
 * @return {@code X} that minimizes the two norm of {@code Q*R*X - B}.
 * @throws IllegalArgumentException if {@code B.rows() != A.rows()}.
 */
public Matrix solve(Matrix mtx) {
if (mtx.rowSize() != q.rowSize())
throw new IllegalArgumentException("Matrix row dimensions must agree.");
int cols = mtx.columnSize();
// Result X starts out all-zero with one column per column of B.
Matrix x = like(r, r.columnSize(), cols);
Matrix qt = q.transpose();
// Y = Q^T * B.
Matrix y = qt.times(mtx);
for (int k = Math.min(r.columnSize(), q.rowSize()) - 1; k >= 0; k--) {
// X[k,] = Y[k,] / R[k,k], note that X[k,] starts with 0 so += is same as =
// NOTE(review): divides by the diagonal R[k,k]; a rank-deficient R (zero on
// the diagonal) yields Infinity/NaN here — confirm callers guarantee full rank.
x.viewRow(k).map(y.viewRow(k), Functions.plusMult(1 / r.get(k, k)));
if (k == 0)
continue;
// Eliminate the solved row from the remaining equations:
// Y[0:(k-1),] -= R[0:(k-1),k] * X[k,]
Vector rCol = r.viewColumn(k).viewPart(0, k);
for (int c = 0; c < cols; c++) y.viewColumn(c).viewPart(0, k).map(rCol, Functions.plusMult(-x.get(k, c)));
}
return x;
}
Aggregations