#BAEL-18260 Downgrade deeplearning4j to version 0.9.1, the latest non-beta release

Author: Alessio Stalla
Date: 2019-10-14 19:18:14 +02:00
parent db0af2415a
commit 6018eb39e4
3 changed files with 15 additions and 20 deletions

pom.xml

@@ -48,7 +48,7 @@
</dependencies>
<properties>
-<dl4j.version>1.0.0-beta5</dl4j.version>
+<dl4j.version>0.9.1</dl4j.version> <!-- Latest non beta version -->
</properties>
</project>

IrisClassifier.java

@@ -4,6 +4,7 @@ import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
+import org.deeplearning4j.eval.Evaluation;
import org.deeplearning4j.nn.conf.BackpropType;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
@@ -11,8 +12,6 @@ import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
-import org.deeplearning4j.util.NetworkUtils;
-import org.nd4j.evaluation.classification.Evaluation;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
@@ -51,11 +50,11 @@ public class IrisClassifier {
DataSet testData = testAndTrain.getTest();
MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
-.maxNumLineSearchIterations(1000)
+.iterations(1000)
.activation(Activation.TANH)
.weightInit(WeightInit.XAVIER)
-//.regularization(true)
-.l2(0.0001)
+.regularization(true)
+.learningRate(0.1).l2(0.0001)
.list()
.layer(0, new DenseLayer.Builder().nIn(FEATURES_COUNT).nOut(3)
.build())
@@ -64,12 +63,11 @@ public class IrisClassifier {
.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
.activation(Activation.SOFTMAX)
.nIn(3).nOut(CLASSES_COUNT).build())
-.backpropType(BackpropType.Standard)//.pretrain(false)
+.backpropType(BackpropType.Standard).pretrain(false)
.build();
MultiLayerNetwork model = new MultiLayerNetwork(configuration);
model.init();
-NetworkUtils.setLearningRate(model, 0.1);
model.fit(trainingData);
INDArray output = model.output(testData.getFeatures());
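
Note: on 0.9.1 the training setup above reads roughly as in the sketch below. This is an approximation rather than the repository's exact IrisClassifier: the hidden layer between the two hunks and the FEATURES_COUNT/CLASSES_COUNT constants are assumptions, and Evaluation comes from org.deeplearning4j.eval, its pre-beta package.

// Sketch only: approximates IrisClassifier's configuration and evaluation on dl4j 0.9.1.
// Assumptions: FEATURES_COUNT = 4, CLASSES_COUNT = 3, and the middle DenseLayer
// (not shown in the hunks above) matches the original class.
import org.deeplearning4j.eval.Evaluation;                // 0.9.1 package for Evaluation
import org.deeplearning4j.nn.conf.BackpropType;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class IrisConfigSketch {

    private static final int FEATURES_COUNT = 4;   // assumed, as in the original class
    private static final int CLASSES_COUNT = 3;    // assumed, as in the original class

    static MultiLayerNetwork buildAndTrain(DataSet trainingData) {
        MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
            .iterations(1000)                      // pre-beta API: number of fit iterations
            .activation(Activation.TANH)
            .weightInit(WeightInit.XAVIER)
            .learningRate(0.1)                     // pre-beta API: learning rate set on the builder
            .regularization(true).l2(0.0001)       // pre-beta API: regularization switched on explicitly
            .list()
            .layer(0, new DenseLayer.Builder().nIn(FEATURES_COUNT).nOut(3).build())
            .layer(1, new DenseLayer.Builder().nIn(3).nOut(3).build())  // assumed hidden layer
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                .activation(Activation.SOFTMAX)
                .nIn(3).nOut(CLASSES_COUNT).build())
            .backpropType(BackpropType.Standard).pretrain(false)
            .build();

        MultiLayerNetwork model = new MultiLayerNetwork(configuration);
        model.init();
        model.fit(trainingData);
        return model;
    }

    static String evaluate(MultiLayerNetwork model, DataSet testData) {
        INDArray output = model.output(testData.getFeatures());
        Evaluation eval = new Evaluation(CLASSES_COUNT);
        eval.eval(testData.getLabels(), output);   // compare predictions against one-hot labels
        return eval.stats();                       // accuracy, precision, recall, F1
    }
}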

MnistClassifier.java

@@ -10,6 +10,7 @@ import org.datavec.api.split.FileSplit;
import org.datavec.image.loader.NativeImageLoader;
import org.datavec.image.recordreader.ImageRecordReader;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
+import org.deeplearning4j.eval.Evaluation;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
@@ -21,15 +22,12 @@ import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.util.ModelSerializer;
-import org.nd4j.evaluation.classification.Evaluation;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
import org.nd4j.linalg.dataset.api.preprocessor.ImagePreProcessingScaler;
import org.nd4j.linalg.learning.config.Nesterovs;
import org.nd4j.linalg.lossfunctions.LossFunctions;
-import org.nd4j.linalg.schedule.MapSchedule;
-import org.nd4j.linalg.schedule.ScheduleType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -71,8 +69,7 @@ public class MnistClassifier {
String localFilePath = basePath + "mnist_png.tar.gz";
File file = new File(localFilePath);
if (!file.exists()) {
-file.getParentFile()
-.mkdirs();
+file.getParentFile().mkdirs();
Utils.downloadAndSave(dataUrl, file);
Utils.extractTarArchive(file, basePath);
}
@@ -135,15 +132,15 @@ public class MnistClassifier {
.build();
final MultiLayerConfiguration config = new NeuralNetConfiguration.Builder().seed(seed)
.l2(0.0005) // ridge regression value
-.updater(new Nesterovs(new MapSchedule(ScheduleType.ITERATION, learningRateSchedule)))
+.updater(new Nesterovs()) //TODO new MapSchedule(ScheduleType.ITERATION, learningRateSchedule)
.weightInit(WeightInit.XAVIER)
.list()
-.layer(layer1)
-.layer(layer2)
-.layer(layer3)
-.layer(layer2)
-.layer(layer4)
-.layer(layer5)
+.layer(0, layer1)
+.layer(1, layer2)
+.layer(2, layer3)
+.layer(3, layer2)
+.layer(4, layer4)
+.layer(5, layer5)
.setInputType(InputType.convolutionalFlat(height, width, channels))
.build();
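
The updater line above drops the MapSchedule-based learning-rate schedule because org.nd4j.linalg.schedule.* does not exist in 0.9.1, and the layer list switches to the indexed .layer(int, Layer) form that the 0.9.1 ListBuilder requires. One possible 0.9.1-era way to keep an iteration-keyed schedule is the builder's own schedule support, sketched below under stated assumptions (enum-style Nesterovs updater, an assumed momentum of 0.9, and the same learningRateSchedule map the class already defines); this is not the repository's code, and whether it reproduces MapSchedule's exact semantics is not verified here.

// Hedged sketch, not MnistClassifier itself: keeps an iteration-keyed learning-rate
// schedule on dl4j 0.9.1 via the builder's schedule support instead of MapSchedule.
// Assumptions: enum-style updater, momentum 0.9, and a learningRateSchedule map
// (iteration -> rate) that provides a rate from iteration 0 onward.
import java.util.Map;
import org.deeplearning4j.nn.conf.LearningRatePolicy;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.weights.WeightInit;

public class MnistScheduleSketch {

    static NeuralNetConfiguration.Builder baseConfig(int seed, Map<Integer, Double> learningRateSchedule) {
        return new NeuralNetConfiguration.Builder()
            .seed(seed)
            .l2(0.0005)                                            // ridge regression value, as in the hunk above
            .updater(Updater.NESTEROVS)                            // 0.9.1 enum-style updater
            .momentum(0.9)                                         // assumed momentum value
            .learningRateDecayPolicy(LearningRatePolicy.Schedule)  // apply the iteration-keyed schedule
            .learningRateSchedule(learningRateSchedule)            // iteration -> learning rate
            .weightInit(WeightInit.XAVIER);
    }
}

The .list()/.layer(i, ...) chain and InputType.convolutionalFlat(height, width, channels) from the diff would then hang off this builder unchanged.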