Java Code Examples for org.nd4j.linalg.util.FeatureUtil#toOutcomeMatrix()

The following examples show how to use org.nd4j.linalg.util.FeatureUtil#toOutcomeMatrix() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: DataSetTest.java — from the nd4j project, Apache License 2.0
@Test
public void testSplitTestAndTrain() throws Exception {
    // 8 examples, all with outcome 0, one label column.
    INDArray labels = FeatureUtil.toOutcomeMatrix(new int[] {0, 0, 0, 0, 0, 0, 0, 0}, 1);
    DataSet data = new DataSet(Nd4j.rand(8, 1), labels);

    SplitTestAndTrain train = data.splitTestAndTrain(6, new Random(1));
    // JUnit convention is assertEquals(expected, actual); the original had the
    // arguments reversed, which yields a misleading failure message.
    assertEquals(6, train.getTrain().getLabels().length());

    // Splitting again with an identically seeded RNG must reproduce the same split.
    SplitTestAndTrain train2 = data.splitTestAndTrain(6, new Random(1));
    assertEquals(getFailureMessage(), train.getTrain().getFeatureMatrix(), train2.getTrain().getFeatureMatrix());

    // Deterministic (no RNG) split takes the first n rows in order.
    DataSet x0 = new IrisDataSetIterator(150, 150).next();
    SplitTestAndTrain testAndTrain = x0.splitTestAndTrain(10);
    assertArrayEquals(new long[] {10, 4}, testAndTrain.getTrain().getFeatureMatrix().shape());
    assertEquals(x0.getFeatureMatrix().getRows(ArrayUtil.range(0, 10)), testAndTrain.getTrain().getFeatureMatrix());
    assertEquals(x0.getLabels().getRows(ArrayUtil.range(0, 10)), testAndTrain.getTrain().getLabels());
}
 
Example 2
Source File: DataSetTest.java — from the deeplearning4j project, Apache License 2.0
@Test
public void testSplitTestAndTrain() {
    // 8 examples, all with outcome 0, one label column.
    INDArray labels = FeatureUtil.toOutcomeMatrix(new int[] {0, 0, 0, 0, 0, 0, 0, 0}, 1);
    DataSet data = new DataSet(Nd4j.rand(8, 1), labels);

    SplitTestAndTrain train = data.splitTestAndTrain(6, new Random(1));
    // JUnit convention is assertEquals(expected, actual); the original had the
    // arguments reversed, which yields a misleading failure message.
    assertEquals(6, train.getTrain().getLabels().length());

    // Splitting again with an identically seeded RNG must reproduce the same split.
    SplitTestAndTrain train2 = data.splitTestAndTrain(6, new Random(1));
    assertEquals(getFailureMessage(), train.getTrain().getFeatures(), train2.getTrain().getFeatures());

    // Deterministic (no RNG) split takes the first n rows in order.
    DataSet x0 = new IrisDataSetIterator(150, 150).next();
    SplitTestAndTrain testAndTrain = x0.splitTestAndTrain(10);
    assertArrayEquals(new long[] {10, 4}, testAndTrain.getTrain().getFeatures().shape());
    assertEquals(x0.getFeatures().getRows(ArrayUtil.range(0, 10)), testAndTrain.getTrain().getFeatures());
    assertEquals(x0.getLabels().getRows(ArrayUtil.range(0, 10)), testAndTrain.getTrain().getLabels());
}
 
Example 3
Source File: LossLayer.java — from the deeplearning4j project, Apache License 2.0
/**
 * Fits the model on the given examples by first converting the integer class
 * labels into a one-hot outcome matrix.
 *
 * @param examples the examples to classify (one example per row)
 * @param labels   the integer class label for each example; must contain one
 *                 entry per row of {@code examples}
 */
@Override
public void fit(INDArray examples, int[] labels) {
    // Delegate to the INDArray-label overload after one-hot encoding the labels.
    fit(examples, FeatureUtil.toOutcomeMatrix(labels, numLabels()));
}
 
Example 4
Source File: ConvolutionLayerSetupTest.java — from the deeplearning4j project, Apache License 2.0
@Test
public void testDenseToOutputLayer() {
    Nd4j.getRandom().setSeed(12345);
    final int numRows = 76;
    final int numColumns = 76;
    final int nChannels = 3;
    final int outputNum = 6;
    final int seed = 123;

    // conv -> max-pool -> conv -> max-pool -> dense -> softmax output.
    // setInputType lets the setup logic infer nIn values and insert the
    // required input preprocessors between layer types.
    MultiLayerConfiguration.Builder conf = new NeuralNetConfiguration.Builder()
                    .seed(seed)
                    .l1(1e-1).l2(2e-4).dropOut(0.5).miniBatch(true)
                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
                    .list()
                    .layer(0, new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                    .build())
                    .layer(2, new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
                    .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                    .build())
                    .layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
                    .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .nOut(outputNum).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutional(numRows, numColumns, nChannels));

    // Minibatch of 10 random images, every example labeled as class 1.
    INDArray features = Nd4j.rand(new int[] {10, nChannels, numRows, numColumns});
    INDArray oneHotLabels = FeatureUtil.toOutcomeMatrix(new int[] {1, 1, 1, 1, 1, 1, 1, 1, 1, 1}, outputNum);

    MultiLayerNetwork network = new MultiLayerNetwork(conf.build());
    network.init();
    network.fit(new DataSet(features, oneHotLabels));
}