MultiLayerConfiguration returns DL4JInvalidInputException: Input size
My CSVDataset is structured as
N,P,K,temperature,humidity,ph,rainfall,label
90,42,43,20.87974371,82.00274423,6.502985292000001,202.9355362,rice
The goal is to recommend the optimal crop given the features. My pipeline is as follows:
private static void dataLoad2() {
try(RecordReader myReader = new CSVRecordReader(1,',')){
myReader.initialize(
//reading file
new FileSplit(
//setting dir
new File("dir/to/cropSet.csv")));
Schema dbSchema = new Schema.Builder()
.addColumnInteger("N")
.addColumnInteger("P")
.addColumnInteger("K")
.addColumnDouble("temperature")
.addColumnDouble("humidity")
.addColumnDouble("ph")
.addColumnDouble("rainfall")
.addColumnCategorical("label", "rice", "maize", "chickpea", "kidneybeans", "pigeonpeas","mothbeans", "mungbean", "blackgram", "lentil", "pomegranate","banana", "mango", "grapes", "watermelon", "muskmelon", "apple","orange", "papaya", "coconut", "cotton", "jute", "coffee")
.build();
DataAnalysis analysis = AnalyzeLocal.analyze(dbSchema, myReader);
TransformProcess transformer = new TransformProcess.Builder(dbSchema)
.convertToInteger("N" ).normalize("N",Normalize.MinMax,analysis)
.convertToInteger("P" ).normalize("N",Normalize.MinMax,analysis)
.convertToInteger("K" ).normalize("K", Normalize.MinMax,analysis )
.removeColumns("temperature" )
.removeColumns("humidity" )
.removeColumns("ph" )
.removeColumns("rainfall" )
.categoricalToInteger("label")
.build();
Schema transSchema = transformer.getFinalSchema();
RecordReader recordReader2 = new CSVRecordReader(1, ',');
TransformProcessRecordReader tprr = new TransformProcessRecordReader(recordReader2,transformer);
tprr.initialize(
//reading file
new FileSplit(
//setting dir
new File("/Users/mac/Desktop/RTS_ML/src/main/resources/cropSet.csv")));
RecordReaderDataSetIterator builderObj = new RecordReaderDataSetIterator.Builder(tprr, 100)
.classification(transSchema.getIndexOfColumn("label"), 23)
.build();
DataSet multiData = builderObj.next();
//Normalizing the data
DataNormalization normalizerObj = new NormalizerStandardize();
//Fit dataset to normalizer
normalizerObj.fit(multiData);
//Perform normalization
normalizerObj.transform(multiData);
//Splitting the dataset: 75 examples for training, the rest for testing
SplitTestAndTrain testAndTrain = multiData.splitTestAndTrain(75);
//train data
DataSet train = testAndTrain.getTrain();
//test data
DataSet test = testAndTrain.getTest();
cropNetwork( train,test,transSchema);
}catch (FrameFilter.Exception e){
out.println("Error: " + e.getLocalizedMessage());
} catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
}
And the model:
private static void cropNetwork(DataSet training, DataSet testing, Schema nwdbSchema){
int outputNum = 23;
int numOfRows =training.numExamples();
int numOfColumns = nwdbSchema.numColumns();
MultiLayerConfiguration nnConfig = new NeuralNetConfiguration.Builder()
.seed(0xC0FFEE)
.weightInit(WeightInit.XAVIER)
.activation(Activation.TANH)
.updater(new Adam.Builder().learningRate(0.005).build())
.l2(0.000316)
.list()
.layer(0,new DenseLayer.Builder().nIn( numOfRows * numOfColumns).nOut(24).activation(Activation.RELU)
.weightInit(WeightInit.XAVIER).build())
.layer(1,new DenseLayer.Builder().nIn(numOfRows * numOfColumns).nOut(numOfRows * numOfColumns).activation(Activation.RELU)
.weightInit(WeightInit.XAVIER).build())
.layer(2,new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nIn(numOfRows * numOfColumns).nOut(23)
.activation(Activation.SOFTMAX).weightInit(WeightInit.XAVIER).build())
.setInputType(InputType.feedForward(numOfRows * numOfColumns))
.build();
// Initializing model
MultiLayerNetwork model2 = new MultiLayerNetwork(nnConfig);
model2.init();
//nnModel.setListeners( new ScoreIterationListener(100));
// Training model
model2.fit(training);
//Evaluating on the held-out test set
INDArray modelOutput = model2.output(testing.getFeatures(), false);
Evaluation evalModel = new Evaluation(23);
evalModel.eval(testing.getLabels(), modelOutput);
out.println(evalModel.stats());
}
This returns:
Exception in thread "main" org.deeplearning4j.exception.DL4JInvalidInputException: Input size (3 columns; shape = [75, 3]) is invalid: does not match layer input size (layer # inputs = 300) (layer name: layer0, layer index: 0, layer type: DenseLayer)
at org.deeplearning4j.nn.layers.BaseLayer.preOutputWithPreNorm(BaseLayer.java:317)
I know it almost certainly has to do with my network configuration, but part of me suspects it's the pipeline. What am I doing wrong?
For InputType.feedForward, the value should equal only the number of columns (features) going into the input layer.
That then sets up the rest of the layers to have the appropriate number of inputs.
Each row is considered an example. You don't need to tell the InputType the number of rows up front; that is always expected to vary by default.
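Here is a minimal sketch of what the corrected configuration could look like, assuming the TransformProcess above leaves three feature columns (N, P, K) plus the label, which matches the shape [75, 3] in the exception. The hidden width of 24 is kept from the question as an illustrative choice; the key change is that nIn of the first layer and InputType.feedForward both use the feature count, never rows * columns:

int numFeatures = 3;  // feature columns after the transform: N, P, K
int numClasses = 23;  // number of crop labels

MultiLayerConfiguration nnConfig = new NeuralNetConfiguration.Builder()
        .seed(0xC0FFEE)
        .weightInit(WeightInit.XAVIER)
        .updater(new Adam.Builder().learningRate(0.005).build())
        .l2(0.000316)
        .list()
        .layer(0, new DenseLayer.Builder()
                .nIn(numFeatures)   // matches the 3 input columns, not rows * columns
                .nOut(24)
                .activation(Activation.RELU)
                .build())
        .layer(1, new DenseLayer.Builder()
                .nIn(24)            // must equal the previous layer's nOut
                .nOut(24)
                .activation(Activation.RELU)
                .build())
        .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                .nIn(24)
                .nOut(numClasses)
                .activation(Activation.SOFTMAX)
                .build())
        // feedForward takes the feature count; the row count (batch size) varies freely
        .setInputType(InputType.feedForward(numFeatures))
        .build();

Note that once setInputType is given, DL4J can infer nIn for each layer automatically, so the explicit nIn calls above are optional.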