// Expected {0.1, 0.11, 0.12, 0.13} but got {0.087, 0.0777, 0.067, 0.0577} — why?
/**
 * Reproduces the prediction-drift problem: after fitting on one series the
 * network extrapolates acceptably, but predictions degrade on a shorter
 * input and again after incremental retraining on the series' continuation.
 */
@Test
public void test3() throws InterruptedException {
    MultiLayerNetwork net = createNeuralNet();

    double[] series = new double[]{0.01, 0.02, 0.03, 0.04, 0.05, 0.06};
    fitValues(net, series);
    // Expected ~{0.07, 0.08, 0.09, 0.1, 0.11, 0.12}; got {0.069, 0.079, 0.088, 0.097, 0.10, 0.11, 0.117} — acceptable.
    extractedOut(net, series);

    double[] shortTail = new double[]{0.05, 0.06};
    // Expected ~{0.07, 0.08, 0.09}; got {0.0299, 0.0398, 0.0496} — why?
    extractedOut(net, shortTail);

    // Retrain on the continuation of the series (more data, same model).
    double[] continuation = new double[]{0.07, 0.08, 0.09};
    fitValues(net, continuation);
    // Expected ~{0.1, 0.11, 0.12, 0.13}; got {0.087, 0.0777, 0.067, 0.0577} — why?
    extractedOut(net, continuation);
}
/**
 * Prints the network's predictions for {@code v}: first the last value of the
 * initial forward pass, then every prediction obtained by feeding that
 * prediction sequence back into the network. Clears the RNN state afterwards
 * so the next call starts fresh.
 */
private void extractedOut(MultiLayerNetwork net, double[] v) {
    double[] predicted = nextValues(net, v);
    // The last element of the first pass is the next predicted value.
    System.out.print(predicted[predicted.length - 1]);
    // Feed the predictions back in and print each follow-up prediction.
    for (double next : nextValues(net, predicted)) {
        System.out.print(", " + next);
    }
    net.rnnClearPreviousState();
    System.out.println(" ");
}
/**
 * Trains the network for 1024 epochs on a single sequence, using
 * {@code v[0..n-2]} as input features and {@code v[1..n-1]} as labels
 * (one-step-ahead prediction targets), then prints the elapsed time.
 *
 * @param net the network to train (mutated in place)
 * @param v   the raw series; must contain at least two values
 */
public void fitValues(MultiLayerNetwork net, double... v) {
    // Input features: every value except the last.
    double[] inputSeries = Arrays.copyOf(v, v.length - 1);
    // Labels: the same series shifted one step ahead.
    double[] targetSeries = Arrays.copyOfRange(v, 1, v.length);
    // Shape [miniBatch=1, channels=1, timeSteps=n-1], as the RNN layers expect.
    // (Build the 3-D array directly; the old INDArray -> toDoubleVector()
    // round trip was redundant.)
    INDArray features = Nd4j.create(new double[][][]{{inputSeries}});
    INDArray labels = Nd4j.create(new double[][][]{{targetSeries}});

    long t = System.currentTimeMillis();
    for (int epoch = 0; epoch < 1024; epoch++) {
        net.fit(features, labels);
    }
    t = System.currentTimeMillis() - t;
    // Fixed: the millisecond remainder is t % 1000; the old (t - t / 1000)
    // printed a nearly-unchanged t (e.g. 1500 ms -> "1499 msec.").
    System.out.println("Time: " + (t / 1000) + " sec. and " + (t % 1000) + " msec.");
}
/**
 * Runs one stateful forward pass ({@code rnnTimeStep}) over the series
 * {@code v} and returns the per-time-step predictions of the single output
 * channel. State is kept between calls; the caller is responsible for
 * clearing it via {@code rnnClearPreviousState()}.
 *
 * @param net the (trained) network to query
 * @param v   the input series, one value per time step
 * @return the prediction for each time step, same length as {@code v}
 */
public double[] nextValues(MultiLayerNetwork net, double... v) {
    // Shape [miniBatch=1, channels=1, timeSteps=v.length]. Building the 3-D
    // array directly replaces the old double[] -> INDArray ->
    // toDoubleVector() -> INDArray round trip, which did nothing useful.
    INDArray input = Nd4j.create(new double[][][]{{v.clone()}});
    INDArray out = net.rnnTimeStep(input);
    // Extract the time series for batch 0, channel 0.
    return out.get(NDArrayIndex.indexesFor(0L, 0L)).toDoubleVector();
}
/**
 * Builds and initializes a sequence-prediction network: three stacked
 * LSTM(100) layers followed by a single-output MSE regression layer,
 * trained with SGD + AdaGrad and truncated BPTT (length 100).
 *
 * @return an initialized, untrained network
 */
public MultiLayerNetwork createNeuralNet() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .seed(12345) // fixed seed for reproducible runs
            .weightInit(WeightInit.XAVIER)
            .updater(new AdaGrad(0.005))
            .list()
            .layer(0, lstmLayer(1, 100)) // input: 1 feature per time step
            .layer(1, lstmLayer(100, 100))
            .layer(2, lstmLayer(100, 100))
            .layer(3, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
                    // NOTE(review): TANH on a regression output bounds
                    // predictions to (-1, 1) and compresses them near the
                    // extremes; Activation.IDENTITY is the usual choice for
                    // MSE regression — confirm whether TANH is intentional.
                    .activation(Activation.TANH)
                    .nIn(100)
                    .nOut(1)
                    .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
                    .gradientNormalizationThreshold(10)
                    .build())
            .backpropType(BackpropType.TruncatedBPTT)
            .tBPTTLength(100)
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    return net;
}

/** One TANH LSTM layer with element-wise gradient clipping at ±10 (the three hidden layers were byte-identical except for nIn/nOut). */
private LSTM lstmLayer(int nIn, int nOut) {
    return new LSTM.Builder()
            .activation(Activation.TANH)
            .nIn(nIn)
            .nOut(nOut)
            .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
            .gradientNormalizationThreshold(10)
            .build();
}