Search in sources:

Example 1 with GradientDescent

Use of de.jannlab.training.GradientDescent in project narchy by automenta.

From the class RNNBeliefPrediction, the train() method bins recent non-eternal beliefs into time-indexed input vectors, flattens them into a single Sample, and fits the recurrent network by gradient descent:

@Override
protected void train() {
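    // Build a training sample from recent, non-eternal beliefs and run one
    // round of gradient-descent training on the recurrent network.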
    // 
    // double[] target = {((data[x(i1)] + data[x(i2)])/2.0)};
    // new Sample(data, target, 2, length, 1, 1);
    TreeMap<Integer, double[]> d = new TreeMap<>();
    int cc = 0;
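    // Half-width, in downsampled frames, of the window over which each belief is smeared.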
    int hd = Math.round(predictionTimeSpanFactor * nar.memory.getDuration() / 2f / downSample);
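    // Bin each concept's non-eternal, non-future beliefs into time-indexed input vectors.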
    for (Concept c : concepts) {
        for (Sentence s : c.beliefs) {
            if (s.isEternal()) {
                continue;
            }
            int o = (int) Math.round(((double) s.getOccurenceTime()) / ((double) downSample));
            if (o > nar.time()) {
                // skip future beliefs; only occurrences up to the present are used
                continue;
            }
            for (int oc = o - hd; oc <= o + hd; oc++) {
                double[] x = d.get(oc);
                if (x == null) {
                    x = new double[inputSize];
                    d.put(oc, x);
                }
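                // Map frequency from [0,1] to [-1,1] and weight it by confidence.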
                float freq = 2f * (s.truth.getFrequency() - 0.5f);
                float conf = s.truth.getConfidence();
                x[cc] += freq * conf;
            }
        }
        cc++;
    }
    if (d.size() < 2) {
        data = null;
        return;
    }
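    // Assemble one contiguous input/target sequence from the binned frames, bounded by maxDataFrames.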
    data = new SampleSet();
    int first = d.firstKey();
    int last = (int) nar.time();
    if (last - first > maxDataFrames * downSample) {
        first = last - maxDataFrames * downSample;
    }
    int frames = last - first;
    int bsize = getInputSize() * frames;
    int isize = getPredictionSize() * frames;
    if (actual == null || actual.length != bsize)
        actual = new double[bsize];
    else
        Arrays.fill(actual, 0);
    if (ideal == null || ideal.length != isize)
        ideal = new double[isize];
    else
        Arrays.fill(ideal, 0);
    int idealSize = getPredictionSize();
    int ac = 0, id = 0;
    double[] prevX = null;
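    // Pair consecutive frames: the previous frame's vector is the input and
    // the prediction derived from the current frame is the target.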
    for (int i = first; i <= last; i++) {
        double[] x = d.get(i);
        if (x == null) {
            x = new double[inputSize];
        } else {
            if (normalizeInputVectors) {
                x = normalize(x);
            }
        }
        if (prevX != null) {
            System.arraycopy(prevX, 0, actual, ac, inputSize);
            ac += inputSize;
            System.arraycopy(getTrainedPrediction(x), 0, ideal, id, idealSize);
            id += idealSize;
        }
        prevX = x;
    }
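    // Wrap the flattened input/target arrays as a single Sample and add it to the training set.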
    Sample s = new Sample(actual, ideal, inputSize, idealSize);
    data.add(s);
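    // Lazily create and configure the gradient-descent trainer; later cycles reuse it.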
    if (trainer == null) {
        trainer = new GradientDescent();
        trainer.setNet(net);
        trainer.setRnd(rnd);
        trainer.setPermute(true);
        trainer.setTrainingSet(data);
        trainer.setLearningRate(learningrate);
        trainer.setMomentum(momentum);
        trainer.setEpochs(trainIterationsPerCycle);
        trainer.setEarlyStopping(false);
        trainer.setOnline(true);
        trainer.setTargetError(0);
        trainer.clearListener();
    } else {
        // trainer.reset();
    }
    trainer.train();
    // System.out.println("LSTM error: " + trainer.getTrainingError());
}
Also used: Concept (nars.nal.entity.Concept), SampleSet (de.jannlab.data.SampleSet), Sample (de.jannlab.data.Sample), GradientDescent (de.jannlab.training.GradientDescent), TreeMap (java.util.TreeMap), Sentence (nars.nal.entity.Sentence)
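
For orientation, here is a minimal, hypothetical sketch that isolates the JannLab trainer configuration used in train() above from the narchy-specific belief handling. It relies only on the GradientDescent setters that appear in the original method; the Net type (assumed here to be de.jannlab.Net), the class and method names, the RNG seed, and the numeric hyperparameters are illustrative assumptions, not values taken from narchy.

import java.util.Random;

import de.jannlab.Net;
import de.jannlab.data.SampleSet;
import de.jannlab.training.GradientDescent;

public class GradientDescentSketch {

    // Configure and run one training pass, mirroring the setter calls in train() above.
    // Note: the Net type/package is assumed; build the network with your own generator.
    public static void train(Net net, SampleSet samples) {
        GradientDescent trainer = new GradientDescent();
        // network and training data
        trainer.setNet(net);
        trainer.setTrainingSet(samples);
        // shuffle sample order each epoch with a seeded RNG
        trainer.setRnd(new Random(1));
        trainer.setPermute(true);
        // hyperparameters: illustrative values only
        trainer.setLearningRate(0.01f);
        trainer.setMomentum(0.9f);
        trainer.setEpochs(100);
        // online updates, no early stopping, run the full epoch budget
        trainer.setOnline(true);
        trainer.setEarlyStopping(false);
        trainer.setTargetError(0);
        trainer.clearListener();
        trainer.train();
    }
}

A fresh trainer is created per call here for simplicity; the narchy method instead constructs the trainer once and reuses it on later cycles.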

Aggregations

Sample (de.jannlab.data.Sample) 1
SampleSet (de.jannlab.data.SampleSet) 1
GradientDescent (de.jannlab.training.GradientDescent) 1
TreeMap (java.util.TreeMap) 1
Concept (nars.nal.entity.Concept) 1
Sentence (nars.nal.entity.Sentence) 1