Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#reshape()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#reshape(). Each example is taken from an open-source project; the source file and project are noted above it.
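
Before the examples, here is a minimal standalone sketch of the reshape basics the snippets below rely on. The shapes and values are illustrative only; the later sketches assume these same two imports.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

INDArray flat = Nd4j.arange(12);            // rank-1, elements 0..11
INDArray matrix = flat.reshape(3, 4);       // rank-2; row-major ('c') order by default
INDArray inferred = flat.reshape(-1, 4);    // -1 infers the remaining dimension (here 3)
INDArray fOrder = flat.reshape('f', 3, 4);  // an explicit order controls how elements are laid out
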
Example 1
Source File: BasicModelUtils.java    From deeplearning4j with Apache License 2.0
protected INDArray adjustRank(INDArray words) {
    if (lookupTable instanceof InMemoryLookupTable) {
        InMemoryLookupTable l = (InMemoryLookupTable) lookupTable;

        INDArray syn0 = l.getSyn0();
        if (!words.dataType().equals(syn0.dataType())) {
            return words.castTo(syn0.dataType());
        }
        if (words.rank() == 0 || words.rank() > 2) {
            throw new IllegalStateException("Invalid rank for wordsNearest method");
        } else if (words.rank() == 1) {
            return words.reshape(1, -1);
        }
    }
    return words;
}
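
A minimal usage sketch of the rank-1 to row-vector normalization above (array contents are illustrative):

INDArray wordVector = Nd4j.create(new double[] {1, 2, 3, 4});  // rank-1 on recent ND4J versions
INDArray row = wordVector.reshape(1, -1);                      // rank-2 row vector, shape [1, 4]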
 
Example 2
Source File: RnnToFeedForwardPreProcessor.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray backprop(INDArray output, int miniBatchSize, LayerWorkspaceMgr workspaceMgr) {
    if (output == null)
        return null; //In a few cases: output may be null, and this is valid. Like time series data -> embedding layer
    //Need to reshape FeedForward layer epsilons (2d) to 3d (for use in RNN layer backprop calculations)
    if (output.rank() != 2)
        throw new IllegalArgumentException(
                        "Invalid input: expect NDArray with rank 2 (i.e., epsilons from feed forward layer)");
    if (output.ordering() != 'f' || !Shape.hasDefaultStridesForShape(output))
        output = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, output, 'f');

    val shape = output.shape();
    INDArray reshaped = output.reshape('f', miniBatchSize, shape[0] / miniBatchSize, shape[1]);
    if (rnnDataFormat == RNNFormat.NCW){
        reshaped = reshaped.permute(0, 2, 1);
    }
    return workspaceMgr.leverageTo(ArrayType.ACTIVATION_GRAD, reshaped);
}
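
A sketch of the 2d-to-3d reshape performed above, assuming miniBatchSize = 2 and otherwise illustrative dimensions:

INDArray eps2d = Nd4j.rand(new int[] {2 * 5, 3}).dup('f');  // [miniBatchSize * timeSteps, nOut], f-order
INDArray eps3d = eps2d.reshape('f', 2, 5, 3);               // [miniBatchSize, timeSteps, nOut]
INDArray ncw = eps3d.permute(0, 2, 1);                      // [miniBatchSize, nOut, timeSteps] for NCW format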
 
Example 3
Source File: LapackTest.java    From nd4j with Apache License 2.0
@Test
public void testQRSquare() {
    INDArray A = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9});
    A = A.reshape('c', 3, 3);
    INDArray O = Nd4j.create(A.shape());
    Nd4j.copy(A, O);
    INDArray R = Nd4j.create(A.columns(), A.columns());

    Nd4j.getBlasWrapper().lapack().geqrf(A, R);

    A.mmuli(R);
    O.subi(A);
    DataBuffer db = O.data();
    for (int i = 0; i < db.length(); i++) {
        assertEquals(0, db.getFloat(i), 1e-5);
    }
}
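
The explicit order argument matters in these LAPACK tests: reshape('c', 3, 3) fills the matrix row by row, while 'f' fills it column by column. A sketch:

INDArray flat9 = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9});
INDArray rowMajor = flat9.reshape('c', 3, 3);  // first row is [1, 2, 3]
INDArray colMajor = flat9.reshape('f', 3, 3);  // first column is [1, 2, 3]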
 
Example 4
Source File: LapackTest.java    From nd4j with Apache License 2.0
@Test
public void testCholeskyL() {
    INDArray A = Nd4j.create(new double[] {2, -1, 1, -1, 2, -1, 1, -1, 2});
    A = A.reshape('c', 3, 3);
    INDArray O = Nd4j.create(A.shape());
    Nd4j.copy(A, O);

    Nd4j.getBlasWrapper().lapack().potrf(A, true);

    A.mmuli(A.transpose());
    O.subi(A);
    DataBuffer db = O.data();
    for (int i = 0; i < db.length(); i++) {
        assertEquals(0, db.getFloat(i), 1e-5);
    }
}
 
Example 5
Source File: MovingWindowMatrix.java    From deeplearning4j with Apache License 2.0
/**
 * Moving window, capture a row x column moving window of
 * a given matrix
 * @param flattened whether the arrays should be flattened or not
 * @return the list of moving windows
 */
public List<INDArray> windows(boolean flattened) {
    List<INDArray> ret = new ArrayList<>();
    int window = 0;

    for (int i = 0; i < toSlice.length(); i++) {
        if (window >= toSlice.length())
            break;
        double[] w = new double[this.windowRowSize * this.windowColumnSize];
        for (int count = 0; count < this.windowRowSize * this.windowColumnSize; count++) {
            w[count] = toSlice.getDouble(count + window);
        }
        INDArray add = Nd4j.create(w);
        if (flattened)
            add = add.ravel();
        else
            add = add.reshape(windowRowSize, windowColumnSize);
        if (addRotate) {
            INDArray currRotation = add.dup();
            //3 different orientations besides the original
            for (int rotation = 0; rotation < 3; rotation++) {
                Nd4j.rot90(currRotation);
                ret.add(currRotation.dup());
            }

        }

        window += this.windowRowSize * this.windowColumnSize;
        ret.add(add);
    }


    return ret;
}
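
A sketch contrasting the ravel() and reshape() branches above, for an illustrative 2x3 window:

INDArray window = Nd4j.arange(6).reshape(2, 3);
INDArray flatWindow = window.ravel();          // rank-1, same 6 elements
INDArray restored = flatWindow.reshape(2, 3);  // back to the 2x3 window shape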
 
Example 6
Source File: ShapeTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testReshapePermute() {
    INDArray arrNoPermute = Nd4j.ones(DataType.DOUBLE, 5, 3, 4);
    INDArray reshaped2dNoPermute = arrNoPermute.reshape(5 * 3, 4); //Contiguous input: reshape works directly
    assertArrayEquals(reshaped2dNoPermute.shape(), new long[] {5 * 3, 4});

    INDArray arr = Nd4j.ones(DataType.DOUBLE, 5, 4, 3);
    INDArray permuted = arr.permute(0, 2, 1);
    assertArrayEquals(arrNoPermute.shape(), permuted.shape());
    INDArray reshaped2D = permuted.reshape(5 * 3, 4); //Regression test: this reshape once threw a NullPointerException
    assertArrayEquals(reshaped2D.shape(), new long[] {5 * 3, 4});
}
 
Example 7
Source File: Subsampling1DLayer.java    From deeplearning4j with Apache License 2.0
@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) {
    if (epsilon.rank() != 3)
        throw new DL4JInvalidInputException("Got rank " + epsilon.rank()
                        + " array as epsilon for Subsampling1DLayer backprop with shape "
                        + Arrays.toString(epsilon.shape())
                        + ". Expected rank 3 array with shape [minibatchSize, features, length]. " + layerId());
    if(maskArray != null){
        INDArray maskOut = feedForwardMaskArray(maskArray, MaskState.Active, (int)epsilon.size(0)).getFirst();
        Preconditions.checkState(epsilon.size(0) == maskOut.size(0) && epsilon.size(2) == maskOut.size(1),
                "Activation gradients dimensions (0,2) and mask dimensions (0,1) don't match: Activation gradients %s, Mask %s",
                epsilon.shape(), maskOut.shape());
        Broadcast.mul(epsilon, maskOut, epsilon, 0, 2);
    }

    // add singleton fourth dimension to input and next layer's epsilon
    INDArray origInput = input;
    input = input.castTo(dataType).reshape(input.size(0), input.size(1), input.size(2), 1);
    epsilon = epsilon.reshape(epsilon.size(0), epsilon.size(1), epsilon.size(2), 1);

    // call 2D SubsamplingLayer's backpropGradient method
    Pair<Gradient, INDArray> gradientEpsNext = super.backpropGradient(epsilon, workspaceMgr);
    INDArray epsNext = gradientEpsNext.getSecond();

    // remove singleton fourth dimension from input and current epsilon
    input = origInput;
    epsNext = epsNext.reshape(epsNext.size(0), epsNext.size(1), epsNext.size(2));

    return new Pair<>(gradientEpsNext.getFirst(), epsNext);
}
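
A sketch of the singleton-dimension round trip above, with illustrative sizes:

INDArray eps3d = Nd4j.rand(new int[] {2, 5, 7});  // [minibatch, channels, length]
INDArray eps4d = eps3d.reshape(2, 5, 7, 1);       // add a singleton width so the 2D layer can process it
INDArray back = eps4d.reshape(2, 5, 7);           // drop the singleton again afterwards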
 
Example 8
Source File: ShapeTestsC.java    From nd4j with Apache License 2.0
@Test
public void testPermuteReshape() {
    INDArray arrTest = Nd4j.arange(60).reshape('c', 3, 4, 5);
    INDArray permute = arrTest.permute(2, 1, 0);
    assertArrayEquals(new long[] {5, 4, 3}, permute.shape());
    assertArrayEquals(new long[] {1, 5, 20}, permute.stride());
    INDArray reshapedPermute = permute.reshape(-1, 12);
    assertArrayEquals(new long[] {5, 12}, reshapedPermute.shape());
    assertArrayEquals(new long[] {12, 1}, reshapedPermute.stride());

}
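
The -1 above asks ND4J to infer that dimension from the total element count (60 / 12 = 5). Since the permuted array is no longer contiguous, the reshape is backed by a copy with fresh c-order strides, which is what the stride assertion verifies. The inference on its own:

INDArray sixty = Nd4j.arange(60);
INDArray inferred = sixty.reshape(-1, 12);  // shape [5, 12]; the 5 is inferred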
 
Example 9
Source File: LapackTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCholeskyU() {
    INDArray A = Nd4j.create(new double[] {3, -1, 2, -1, 3, -1, 2, -1, 3});
    A = A.reshape('f', 3, 3);
    INDArray O = Nd4j.create(A.dataType(), A.shape());
    Nd4j.copy(A, O);

    Nd4j.getBlasWrapper().lapack().potrf(A, false);
    A = A.transpose().mmul(A);
    O.subi(A);
    DataBuffer db = O.data();
    for (int i = 0; i < db.length(); i++) {
        assertEquals(0, db.getFloat(i), 1e-5);
    }
}
 
Example 10
Source File: WeightInitVarScalingNormalFanIn.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray init(double fanIn, double fanOut, long[] shape, char order, INDArray paramView) {
    double std;
    if(scale == null){
        std = Math.sqrt(1.0 / fanIn);
    } else {
        std = Math.sqrt(scale / fanIn);
    }

    Nd4j.exec(new TruncatedNormalDistribution(paramView, 0.0, std));
    return paramView.reshape(order, shape);
}
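
A sketch of the flat-view-to-weight-matrix reshape that these WeightInit implementations all end with (shape and order are illustrative):

long[] shape = {4, 3};                             // e.g. [nIn, nOut]
INDArray paramView = Nd4j.zeros(1, 4 * 3);         // flat view, as DL4J passes it in
INDArray weights = paramView.reshape('f', shape);  // [4, 3], typically a view over the same buffer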
 
Example 11
Source File: WeightInitVarScalingUniformFanOut.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray init(double fanIn, double fanOut, long[] shape, char order, INDArray paramView) {
    double scalingFanOut = 3.0 / Math.sqrt(fanOut);
    if(scale != null)
        scalingFanOut *= scale;
    Nd4j.rand(paramView, Nd4j.getDistributions().createUniform(-scalingFanOut, scalingFanOut));
    return paramView.reshape(order, shape);
}
 
Example 12
Source File: TimeSeriesUtils.java    From deeplearning4j with Apache License 2.0
/**
 * Reshape time series mask arrays. This should match the assumptions (f order, etc) in RnnOutputLayer
 * @param timeSeriesMaskAsVector    Mask array to reshape to a column vector
 * @return                  Mask array as a column vector
 */
public static INDArray reshapeVectorToTimeSeriesMask(INDArray timeSeriesMaskAsVector, int minibatchSize) {
    if (!timeSeriesMaskAsVector.isVector())
        throw new IllegalArgumentException("Cannot reshape mask: expected vector");

    val timeSeriesLength = timeSeriesMaskAsVector.length() / minibatchSize;

    return timeSeriesMaskAsVector.reshape('f', minibatchSize, timeSeriesLength);
}
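
A usage sketch for the helper above, with illustrative sizes: a 15-element mask vector and minibatchSize = 3 yield a [3, 5] mask.

INDArray maskVector = Nd4j.ones(15);
INDArray mask = TimeSeriesUtils.reshapeVectorToTimeSeriesMask(maskVector, 3);  // shape [3, 5], f-order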
 
Example 13
Source File: NDArrayCreationUtil.java    From deeplearning4j with Apache License 2.0
public static List<Pair<INDArray, String>> get6dReshapedWithShape(int seed, int[] shape, DataType dataType) {
    Nd4j.getRandom().setSeed(seed);
    int[] shape3d = {shape[0] * shape[2], shape[4] * shape[5], shape[1] * shape[3]};
    INDArray array3d = Nd4j.rand(dataType, shape3d);
    INDArray array6d = array3d.reshape(ArrayUtil.toLongArray(shape));
    return Collections.singletonList(new Pair<>(array6d,
                    "get6dReshapedWithShape(" + seed + "," + Arrays.toString(shape) + ").get(0)"));
}
 
Example 14
Source File: GradientCheckTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testEmbeddingSequenceLayer(){
    Nd4j.getRandom().setSeed(12345);

    for(RNNFormat seqOutputFormat : RNNFormat.values()) {
        for (boolean maskArray : new boolean[]{false, true}) {
            for (int inputRank : new int[]{2, 3}) {

                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .dataType(DataType.DOUBLE)
                        .seed(12345)
                        .updater(new NoOp())
                        .weightInit(new NormalDistribution(0, 1))
                        .list()
                        .layer(new EmbeddingSequenceLayer.Builder()
                                .nIn(8)
                                .nOut(4)
                                .outputDataFormat(seqOutputFormat)
                                .build())
                        .layer(new RnnOutputLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH)
                                .dataFormat(seqOutputFormat)
                                .lossFunction(LossFunction.MSE).build())
                        .build();

                MultiLayerNetwork net = new MultiLayerNetwork(conf);
                net.init();

                boolean ncw = seqOutputFormat == RNNFormat.NCW;

                INDArray in = Transforms.floor(Nd4j.rand(3, 6).muli(8));    //Integers 0 to 7 inclusive
                INDArray label = Nd4j.rand(DataType.FLOAT, ncw ? new int[]{3, 3, 6} : new int[]{3,6,3});

                if (inputRank == 3) {
                    //Reshape from [3,6] to [3,1,6]
                    in = in.reshape('c', 3, 1, 6);
                }

                INDArray fMask = null;
                if (maskArray) {
                    fMask = Nd4j.create(new double[][]{{1, 1, 1, 1, 1, 1},
                            {1, 1, 0, 0, 0, 0},
                            {1, 0, 0, 0, 0, 0}});

                }

                String msg = "mask=" + maskArray + ", inputRank=" + inputRank;
                boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(in)
                        .labels(label).inputMask(fMask));
                assertTrue(msg, gradOK);
                TestUtils.testModelSerialization(net);


                //Also: if mask is present, double check that the masked steps don't impact score
                if (maskArray) {
                    DataSet ds = new DataSet(in, label, fMask, null);
                    double score = net.score(ds);
                    if (inputRank == 2) {
                        in.putScalar(1, 2, 0);
                        in.putScalar(2, 1, 0);
                        in.putScalar(2, 2, 0);
                    } else {
                        in.putScalar(1, 0, 2, 0);
                        in.putScalar(2, 0, 1, 0);
                        in.putScalar(2, 0, 2, 0);
                    }
                    double score2 = net.score(ds);
                    assertEquals(score, score2, 1e-6);
                    if (inputRank == 2) {
                        in.putScalar(1, 2, 1);
                        in.putScalar(2, 1, 1);
                        in.putScalar(2, 2, 1);
                    } else {
                        in.putScalar(1, 0, 2, 1);
                        in.putScalar(2, 0, 1, 1);
                        in.putScalar(2, 0, 2, 1);
                    }
                    double score3 = net.score(ds);
                    assertEquals(score, score3, 1e-6);
                }
            }
        }
    }
}
 
Example 15
Source File: ImagePreProcessortTest.java    From deeplearning4j with Apache License 2.0
@Test
public void simpleImageTestMulti() {
    INDArray rChannels = Nd4j.zeros(10, 10).addi(128);
    INDArray gChannels = Nd4j.zeros(10, 10).addi(64);
    INDArray bChannels = Nd4j.zeros(10, 10).addi(255);
    INDArray image = Nd4j.vstack(rChannels, gChannels, bChannels).reshape(3, 10, 10);
    INDArray orig = image.dup();

    //System.out.println(Arrays.toString(image.shape()));
    MultiDataSet ds = new MultiDataSet(new INDArray[]{Nd4j.valueArrayOf(10, 100.0), image.reshape(1, 3, 10, 10)},
            new INDArray[]{Nd4j.ones(1, 1)});
    ImageMultiPreProcessingScaler myScaler = new ImageMultiPreProcessingScaler(1);
    //So this should scale to 0.5,0.25 and 1;
    INDArray expected = image.mul(0);
    expected.slice(0, 0).addi(0.5);
    expected.slice(1, 0).addi(0.25);
    expected.slice(2, 0).addi(1.0);
    myScaler.transform(ds);
    assertEquals(Nd4j.valueArrayOf(10, 100.0), ds.getFeatures(0));
    assertTrue(Transforms.abs(ds.getFeatures(1).sub(expected)).maxNumber().doubleValue() <= 0.01);

    //Now giving it 16 bits instead of the default
    //System.out.println(Arrays.toString(image.shape()));
    ds = new MultiDataSet(new INDArray[]{Nd4j.valueArrayOf(10, 100.0), image.reshape(1, 3, 10, 10)},
            new INDArray[]{Nd4j.ones(1, 1)});
    myScaler = new ImageMultiPreProcessingScaler(0.0, 1.0, 16, new int[]{1});
    //So this should scale to 0.5,0.25 and 1;
    expected = image.mul(0);
    expected.slice(0, 0).addi(0.5 / 256);
    expected.slice(1, 0).addi(0.25 / 256);
    expected.slice(2, 0).addi(1.0 / 256);
    myScaler.transform(ds);
    assertEquals(Nd4j.valueArrayOf(10, 100.0), ds.getFeatures(0));
    assertTrue(Transforms.abs(ds.getFeatures(1).sub(expected)).maxNumber().doubleValue() <= 0.01);

    //So this should not change the value
    INDArray before = ds.getFeatures(1).dup();
    myScaler = new ImageMultiPreProcessingScaler(0.0, 1.0, new int[]{1});
    myScaler.transform(ds);
    assertTrue(Transforms.abs(ds.getFeatures(1).sub(before)).maxNumber().doubleValue() <= 0.0001);

    //Scaling back up should give the same results
    myScaler = new ImageMultiPreProcessingScaler(0.0, (256.0 * 256 * 256 - 1), new int[]{1});
    myScaler.transform(ds);
    assertTrue(Transforms.abs(ds.getFeatures(1).sub(image)).maxNumber().doubleValue() <= 1);

    //Revert:
    before = orig.dup();
    myScaler = new ImageMultiPreProcessingScaler(0.0, 1.0, 1, new int[]{1});
    MultiDataSet beforeDS = new MultiDataSet(new INDArray[]{null, before}, new INDArray[]{null});
    myScaler.transform(beforeDS);
    myScaler.revertFeatures(beforeDS.getFeatures());
    assertEquals(orig, before);
}
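
A sketch of the leading-minibatch-dimension reshape used repeatedly above:

INDArray chw = Nd4j.rand(new int[] {3, 10, 10});  // single image, [channels, height, width]
INDArray nchw = chw.reshape(1, 3, 10, 10);        // [minibatch, channels, height, width]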
 
Example 16
Source File: YoloUtils.java    From deeplearning4j with Apache License 2.0
public static INDArray activate(@NonNull INDArray boundingBoxPriors, @NonNull INDArray input, boolean nchw, LayerWorkspaceMgr layerWorkspaceMgr){
    if(!nchw)
        input = input.permute(0,3,1,2); //NHWC to NCHW

    long mb = input.size(0);
    long h = input.size(2);
    long w = input.size(3);
    long b = boundingBoxPriors.size(0);
    long c = input.size(1) / b - 5;  //input.size(1) == b * (5 + C) -> C = (input.size(1)/b) - 5

    INDArray output = layerWorkspaceMgr.create(ArrayType.ACTIVATIONS, input.dataType(), input.shape(), 'c');
    INDArray output5 = output.reshape('c', mb, b, 5+c, h, w);
    INDArray output4 = output;  //output.get(all(), interval(0,5*b), all(), all());
    INDArray input4 = input.dup('c');    //input.get(all(), interval(0,5*b), all(), all()).dup('c');
    INDArray input5 = input4.reshape('c', mb, b, 5+c, h, w);

    //X/Y center in grid: sigmoid
    INDArray predictedXYCenterGrid = input5.get(all(), all(), interval(0,2), all(), all());
    Transforms.sigmoid(predictedXYCenterGrid, false);

    //width/height: prior * exp(input)
    INDArray predictedWHPreExp = input5.get(all(), all(), interval(2,4), all(), all());
    INDArray predictedWH = Transforms.exp(predictedWHPreExp, false);
    Broadcast.mul(predictedWH, boundingBoxPriors.castTo(input.dataType()), predictedWH, 1, 2);  //Box priors: [b, 2]; predictedWH: [mb, b, 2, h, w]

    //Confidence - sigmoid
    INDArray predictedConf = input5.get(all(), all(), point(4), all(), all());   //Shape: [mb, B, H, W]
    Transforms.sigmoid(predictedConf, false);

    output4.assign(input4);

    //Softmax
    //TODO OPTIMIZE?
    INDArray inputClassesPreSoftmax = input5.get(all(), all(), interval(5, 5+c), all(), all());   //Shape: [minibatch, C, H, W]
    INDArray classPredictionsPreSoftmax2d = inputClassesPreSoftmax.permute(0,1,3,4,2) //[minibatch, b, c, h, w] To [mb, b, h, w, c]
            .dup('c').reshape('c', new long[]{mb*b*h*w, c});
    Transforms.softmax(classPredictionsPreSoftmax2d, false);
    INDArray postSoftmax5d = classPredictionsPreSoftmax2d.reshape('c', mb, b, h, w, c ).permute(0, 1, 4, 2, 3);

    INDArray outputClasses = output5.get(all(), all(), interval(5, 5+c), all(), all());   //Shape: [minibatch, C, H, W]
    outputClasses.assign(postSoftmax5d);

    if(!nchw)
        output = output.permute(0,2,3,1);       //NCHW to NHWC

    return output;
}
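
A sketch of the central reshape above: the b * (5 + c) channel dimension is split into separate box and per-box-value axes (all sizes illustrative):

long mb = 2, b = 3, c = 4, h = 5, w = 5;
INDArray in = Nd4j.rand(DataType.FLOAT, mb, b * (5 + c), h, w);
INDArray in5 = in.dup('c').reshape('c', mb, b, 5 + c, h, w);  // [mb, b, 5+c, h, w]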
 
Example 17
Source File: EmbeddingSequenceLayer.java    From deeplearning4j with Apache License 2.0
@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) {
    assertInputSet(true);
    INDArray z = preOutput(true, workspaceMgr);
    INDArray delta = layerConf().getActivationFn().backprop(z, epsilon).getFirst(); //Shape: [mb, vector, seqLength]

    boolean ncw = layerConf().getOutputFormat() == RNNFormat.NCW;

    if (maskArray != null) {
        if(ncw){
            delta = Broadcast.mul(delta, maskArray, delta, 0, 2);
        } else {
            delta = Broadcast.mul(delta, maskArray, delta, 0, 1);
        }
    }

    int inputLength = layerConf().getInputLength();
    long numSamples = input.size(0);
    val nOut = layerConf().getNOut();

    if (delta.ordering() != 'c' || delta.isView() || !hasDefaultStridesForShape(delta)){
        delta = delta.dup('c');
    }

    if(ncw){
        delta = delta.permute(0, 2, 1);     //From [minibatch, nOut, length] to [minibatch, length, nOut]
    }

    delta = delta.reshape('c',inputLength * numSamples, nOut);

    INDArray weightGradients = gradientViews.get(DefaultParamInitializer.WEIGHT_KEY);
    weightGradients.assign(0);

    if (!hasDefaultStridesForShape(input))
        input = workspaceMgr.dup(ArrayType.ACTIVATIONS, input, 'f');

    INDArray indices = Nd4j.createFromArray(indexes);
    Nd4j.scatterUpdate(org.nd4j.linalg.api.ops.impl.scatter.ScatterUpdate.UpdateOp.ADD, weightGradients, indices, delta, WEIGHT_DIM);

    Gradient ret = new DefaultGradient();
    ret.gradientForVariable().put(DefaultParamInitializer.WEIGHT_KEY, weightGradients);

    if (hasBias()) {
        INDArray biasGradientsView = gradientViews.get(DefaultParamInitializer.BIAS_KEY);
        delta.sum(biasGradientsView, 0); //biasGradientView is initialized/zeroed first in sum op
        ret.gradientForVariable().put(DefaultParamInitializer.BIAS_KEY, biasGradientsView);
    }

    return new Pair<>(ret, null);
}
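
A sketch of the flattening step above: [minibatch, length, nOut] collapses to [minibatch * length, nOut] so that each time step becomes one row for the scatter update (sizes illustrative):

INDArray delta3d = Nd4j.rand(new int[] {3, 6, 4}).dup('c');  // [minibatch, length, nOut]
INDArray delta2d = delta3d.reshape('c', 3 * 6, 4);           // [18, 4], one row per (sample, step)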
 
Example 18
Source File: WeightInitXavierLegacy.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray init(double fanIn, double fanOut, long[] shape, char order, INDArray paramView) {
    Nd4j.randn(paramView).divi(FastMath.sqrt(shape[0] + shape[1]));
    return paramView.reshape(order, shape);
}
 
Example 19
Source File: WeightInitRelu.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray init(double fanIn, double fanOut, long[] shape, char order, INDArray paramView) {
    Nd4j.randn(paramView).muli(FastMath.sqrt(2.0 / fanIn)); //N(0, 2/nIn)
    return paramView.reshape(order, shape);
}