Use of de.jannlab.data.Sample in project narchy by automenta: class RNNBeliefPrediction, method predict().
/**
 * Runs a forward pass of the network on the most recent training sample and
 * returns the network's output vector.
 *
 * @return the predicted output vector (normalized when {@code normalizeOutputVector}
 *         is set), or {@code null} if {@link #train()} has not produced any data
 */
protected double[] predict() {
    if (data == null) {
        return null; // no training data yet; nothing to predict from
    }
    if (predictedOutput == null) {
        predictedOutput = new double[getPredictionSize()];
    }
    // Feed the latest sample forward; performForward's return value is the forward error
    Sample lastSample = data.get(data.size() - 1);
    double error = NetTools.performForward(this.net, lastSample);
    net.output(predictedOutput, 0);
    if (normalizeOutputVector) {
        predictedOutput = normalize(predictedOutput);
    }
    // System.out.println("output: " + Arrays.toString(predictedOutput) + " " + error);
    return predictedOutput;
}
Use of de.jannlab.data.Sample in project narchy by automenta: class RNNBeliefPrediction, method train().
/**
 * Builds one flattened training sample from the non-eternal beliefs of the
 * observed concepts and runs a training cycle of the network on it.
 * <p>
 * Each belief contributes its signed expectation (frequency rescaled to
 * [-1, 1], weighted by confidence) to the input vectors of the frames in a
 * window around its (down-sampled) occurrence time. Consecutive frames are
 * then paired as (input at t, target prediction at t+1) and concatenated
 * into the {@code actual}/{@code ideal} buffers for the trainer.
 * Sets {@code data} to {@code null} when fewer than two frames exist.
 */
@Override
protected void train() {
    //
    // double[] target = {((data[x(i1)] + data[x(i2)])/2.0)};
    // new Sample(data, target, 2, length, 1, 1);
    // Map from down-sampled occurrence time -> input activation vector
    TreeMap<Integer, double[]> d = new TreeMap<>();
    int cc = 0;
    // Half-width (in down-sampled frames) of the window each belief is smeared over
    int hd = Math.round(predictionTimeSpanFactor * nar.memory.getDuration() / 2f / downSample);
    for (Concept c : concepts) {
        for (Sentence s : c.beliefs) {
            if (s.isEternal()) {
                continue; // only temporally-anchored beliefs contribute
            }
            int o = (int) Math.round(((double) s.getOccurenceTime()) / ((double) downSample));
            if (o > nar.time()) {
                continue; // skip future beliefs; train only on past/present evidence
            }
            for (int oc = o - hd; oc <= o + hd; oc++) {
                double[] x = d.get(oc);
                if (x == null) {
                    x = new double[inputSize];
                    d.put(oc, x);
                }
                // Rescale frequency [0..1] to [-1..1] and weight by confidence
                float freq = 2f * (s.truth.getFrequency() - 0.5f);
                float conf = s.truth.getConfidence();
                x[cc] += freq * conf;
            }
        }
        cc++;
    }
    if (d.size() < 2) {
        data = null; // not enough frames to form an input/target pair
        return;
    }
    data = new SampleSet();
    int first = d.firstKey();
    int last = (int) nar.time();
    // Bound the history window to at most maxDataFrames frames
    if (last - first > maxDataFrames * downSample) {
        first = last - maxDataFrames * downSample;
    }
    int frames = last - first;
    int bsize = getInputSize() * frames;
    int isize = getPredictionSize() * frames;
    // Reuse the flattened input/target buffers when their sizes still match
    if (actual == null || actual.length != bsize) {
        actual = new double[bsize];
    } else {
        Arrays.fill(actual, 0);
    }
    if (ideal == null || ideal.length != isize) {
        ideal = new double[isize];
    } else {
        Arrays.fill(ideal, 0);
    }
    int idealSize = getPredictionSize();
    int ac = 0, id = 0;
    double[] prevX = null;
    // Pair each frame's input (prevX) with the next frame's target prediction
    for (int i = first; i <= last; i++) {
        double[] x = d.get(i);
        if (x == null) {
            x = new double[inputSize]; // no beliefs at this time: zero vector
        } else {
            if (normalizeInputVectors) {
                x = normalize(x);
            }
        }
        if (prevX != null) {
            System.arraycopy(prevX, 0, actual, ac, inputSize);
            ac += inputSize;
            System.arraycopy(getTrainedPrediction(x), 0, ideal, id, idealSize);
            id += idealSize;
        }
        prevX = x;
    }
    Sample s = new Sample(actual, ideal, inputSize, idealSize);
    data.add(s);
    if (trainer == null) {
        // Lazily configure the trainer once; it keeps referencing the 'data' set
        trainer = new GradientDescent();
        trainer.setNet(net);
        trainer.setRnd(rnd);
        trainer.setPermute(true);
        trainer.setTrainingSet(data);
        trainer.setLearningRate(learningrate);
        trainer.setMomentum(momentum);
        trainer.setEpochs(trainIterationsPerCycle);
        trainer.setEarlyStopping(false);
        trainer.setOnline(true);
        trainer.setTargetError(0);
        trainer.clearListener();
    } else {
        // trainer.reset();
    }
    trainer.train();
    // System.out.println("LSTM error: " + trainer.getTrainingError());
}