Do this instead:
package org.example;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.modelimport.keras.KerasModelImport;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.transferlearning.FineTuneConfiguration;
import org.deeplearning4j.nn.transferlearning.TransferLearning;
import org.nd4j.linalg.activations.impl.ActivationSoftmax;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Adam;
public class TestForumMlp {

    /**
     * Imports a Keras sequential model and prepares it for training via transfer learning.
     *
     * <p>The Keras model is imported for inference only ({@code enforceTrainingConfig = false}),
     * so it has no updater; {@link TransferLearning} is used purely to attach training
     * configuration (an Adam updater). The imported model already ends in an output layer,
     * so no extra {@code OutputLayer} is added — stacking a second one (as the original
     * snippet did) breaks the architecture and the fit call.
     *
     * @param args unused command-line arguments
     * @throws Exception if the HDF5 model file cannot be read or imported
     */
    public static void main(String... args) throws Exception {
        // Path to the saved Keras model; NOTE(review): assumes the file exists in the
        // working directory — confirm before running.
        final String simpleMlp = "simple_mlp.h5";
        System.out.println(simpleMlp);

        // false => do not enforce a training configuration; import for inference only.
        final MultiLayerNetwork model =
                KerasModelImport.importKerasSequentialModelAndWeights(simpleMlp, false);

        // Attach only the training information (updater). Do NOT add another output
        // layer: the imported network already has one as its final layer.
        final MultiLayerNetwork transferLearning = new TransferLearning.Builder(model)
                .fineTuneConfiguration(new FineTuneConfiguration.Builder()
                        .updater(new Adam())
                        .build())
                .build();

        // Self-distillation style smoke test: fit the transfer model on the original
        // model's own outputs for a random (zero-initialized) batch of 256 x 100 floats.
        final INDArray input = Nd4j.create(DataType.FLOAT, 256, 100);
        final INDArray output = model.output(input);
        transferLearning.fit(input, output);
    }
}
Remove the extra output layer and you should be good to go. 99% of the time, model import is used for inference; you then add your updater and other training configuration on top of that with transfer learning.