Java Code Examples for org.nd4j.linalg.factory.Nd4j#randn()

The following examples show how to use org.nd4j.linalg.factory.Nd4j#randn(). Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
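
Nd4j#randn() fills a new INDArray with samples from the standard normal distribution N(0, 1). For orientation, the sketch below collects the overloads that appear in the examples on this page; the variable names are illustrative only.

INDArray a = Nd4j.randn(5, 5);                       // rows x columns, default order and data type
INDArray b = Nd4j.randn('f', new int[]{5, 1});       // explicit element order: 'c' or 'f'
INDArray c = Nd4j.randn(new long[]{100, 16});        // shape given as a long[] (an int[] overload also appears below)
INDArray d = Nd4j.randn(DataType.DOUBLE, 10, 4);     // explicit data type, then shape
INDArray e = Nd4j.randn(128, 1, 5325235);            // rows, columns, fixed seed for reproducible draws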
Example 1
Source File: Nd4jApacheAdapterUtilsUnitTest.java    From gatk-protected with BSD 3-Clause "New" or "Revised" License
@Test
public void testINDArrayToApacheVector() {
    final INDArray rowArrCOrder = Nd4j.randn('c', new int[] {1, 5});
    final INDArray rowArrFOrder = Nd4j.randn('f', new int[] {1, 5});
    final INDArray colArrCOrder = Nd4j.randn('c', new int[] {5, 1});
    final INDArray colArrFOrder = Nd4j.randn('f', new int[] {5, 1});

    assertINDArrayToApacheVectorCorrectness(rowArrCOrder);
    assertINDArrayToApacheVectorCorrectness(rowArrFOrder);
    assertINDArrayToApacheVectorCorrectness(colArrCOrder);
    assertINDArrayToApacheVectorCorrectness(colArrFOrder);

    /* test on INDArray views */
    assertINDArrayToApacheVectorCorrectness(rowArrCOrder.get(NDArrayIndex.all(), NDArrayIndex.interval(2, 4)));
    assertINDArrayToApacheVectorCorrectness(rowArrFOrder.get(NDArrayIndex.all(), NDArrayIndex.interval(2, 4)));
    assertINDArrayToApacheVectorCorrectness(colArrCOrder.get(NDArrayIndex.interval(2, 4), NDArrayIndex.all()));
    assertINDArrayToApacheVectorCorrectness(colArrFOrder.get(NDArrayIndex.interval(2, 4), NDArrayIndex.all()));
}
 
Example 2
Source File: TestRandomProjection.java    From deeplearning4j with Apache License 2.0
@Test
public void testEmbedding(){
    INDArray z1 = Nd4j.randn(2000, 400);
    INDArray z2 = z1.dup();
    INDArray result = Transforms.allEuclideanDistances(z1, z2, 1);

    RandomProjection rp = new RandomProjection(0.5);
    INDArray zp = rp.project(z1);
    INDArray zp2 = zp.dup();
    INDArray projRes = Transforms.allEuclideanDistances(zp, zp2, 1);

    // check that the automatically tuned values for the density respect the
    // contract for eps: pairwise distances are preserved according to the
    // Johnson-Lindenstrauss lemma
    INDArray ratios = projRes.div(result);

    for (int i = 0; i < ratios.length(); i++){
        double val = ratios.getDouble(i);
        // this avoids the NaNs we get along the diagonal
        if (val == val) {
            assertTrue(ratios.getDouble(i) < 1.5);
        }
    }

}
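
The constructor argument 0.5 above is the distortion bound eps from the Johnson-Lindenstrauss lemma: projecting n points into at least k = 4 * ln(n) / (eps^2/2 - eps^3/3) dimensions preserves pairwise distances within a factor of (1 +/- eps). A minimal sketch of that bound (the helper name is illustrative, not part of the nd4j API):

// Johnson-Lindenstrauss minimum embedding dimension for n samples at distortion eps
static long jlMinDim(long n, double eps) {
    double denominator = (eps * eps / 2.0) - (eps * eps * eps / 3.0);
    return (long) Math.ceil(4.0 * Math.log(n) / denominator);
}
// jlMinDim(2000, 0.5) -> 365, the kind of target dimension the auto-tuned projection above would pick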
 
Example 3
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testSpaceToDepth() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    int[] inputShape = new int[]{miniBatch, 2 * blockSize, 2 * blockSize, 1};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2, 2, blockSize * blockSize);
    DynamicCustomOp op = new SpaceToDepth(input, expOut, blockSize, DataFormat.NHWC);
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.cnn().spaceToDepth("std", sdInput, blockSize, DataFormat.NHWC);
    //new SpaceToDepth(sd, sdInput, blockSize, dataFormat).outputVariable();
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("std", expOut)
            .gradientCheck(true));
    assertNull(err);
}
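
Shape-wise: the NHWC input here is [128, 8, 8, 1], and spaceToDepth with blockSize 4 moves each 4x4 spatial patch into the channel dimension, giving [128, 2, 2, 16], which matches the expOut array created above.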
 
Example 4
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSigmoidCrossEntropy() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.DOUBLE, -1, nOut);

    INDArray wArr = Nd4j.create(new double[][]{
            {0, 0, 0, 0}, {0, 0, 1, 1}, {1, 1, 0, 0}, {1, 1, 1, 1}, {1, 1, 1, 1},
            {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}});
    SDVariable w = sd.var("weights", wArr);

    LossReduce reduction = LossReduce.MEAN_BY_NONZERO_WEIGHT_COUNT;

    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    double labelSmoothing = 0.01;

    SDVariable loss = sd.loss().sigmoidCrossEntropy("loss", labels, predictions, w, reduction, labelSmoothing);
    SDVariable loss2 = sd.loss().sigmoidCrossEntropy("loss2", labels, predictions,
            null, reduction, labelSmoothing);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray y_exp = loss.eval();
    INDArray y_exp2 = loss2.eval();

    INDArray y = Nd4j.loss().sigmoidCrossEntropy(labelsArr, predictionsArr, wArr, reduction, labelSmoothing);
    INDArray y2 = Nd4j.loss().sigmoidCrossEntropy(labelsArr, predictionsArr, null, reduction, labelSmoothing);
    assertEquals(y_exp, y);
    assertEquals(y_exp2, y2);
}
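
The pattern in this example (and the next) verifies that the one-shot Nd4j.loss() helpers agree exactly with building the same loss node in a SameDiff graph and calling eval(), both with an explicit weights array and with weights omitted (null).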
 
Example 5
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testMeanPairwiseSquaredError() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.DOUBLE, -1, nOut);

    INDArray wArr = Nd4j.create(new double[][]{
            {0, 0, 0, 0}, {0, 0, 1, 1}, {1, 1, 0, 0}, {1, 1, 1, 1}, {1, 1, 1, 1},
            {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}});
    SDVariable w = sd.var("weights", wArr);

    LossReduce reduction = LossReduce.MEAN_BY_NONZERO_WEIGHT_COUNT;

    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);

    SDVariable loss = sd.loss().meanPairwiseSquaredError("loss", labels, predictions, w, reduction);
    SDVariable loss2 = sd.loss().meanPairwiseSquaredError("loss2", labels, predictions,
            null, reduction);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray y_exp = loss.eval();
    INDArray y_exp2 = loss2.eval();

    INDArray y = Nd4j.loss().meanPairwiseSquaredError(labelsArr, predictionsArr, wArr, reduction);
    INDArray y2 = Nd4j.loss().meanPairwiseSquaredError(labelsArr, predictionsArr, null, reduction);
    assertEquals(y_exp, y);
    assertEquals(y_exp2, y2);
}
 
Example 6
Source File: BinarySerdeTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testReadWriteFile() throws Exception {
    File tmpFile = new File(System.getProperty("java.io.tmpdir"),
                    "ndarraytmp-" + UUID.randomUUID().toString() + " .bin");
    tmpFile.deleteOnExit();
    INDArray rand = Nd4j.randn(5, 5);
    BinarySerde.writeArrayToDisk(rand, tmpFile);
    INDArray fromDisk = BinarySerde.readFromDisk(tmpFile);
    assertEquals(rand, fromDisk);
}
 
Example 7
Source File: TestRandomProjection.java    From nd4j with Apache License 2.0
private void testRandomProjectionDeterministicForSameShape(){
    INDArray z1 = Nd4j.randn(10000, 500);
    RandomProjection rp = new RandomProjection(0.5);
    INDArray res1 = Nd4j.zeros(10000, 442);
    rp.projecti(z1, res1);

    INDArray res2 = Nd4j.zeros(10000, 442);
    rp.projecti(z1, res2);

    assertEquals(res1, res2);
}
 
Example 8
Source File: NDArrayTestsFortran.java    From deeplearning4j with Apache License 2.0
@Test
public void testRand() {
    INDArray rand = Nd4j.randn(5, 5);
    Nd4j.getDistributions().createUniform(0.4, 4).sample(5);
    Nd4j.getDistributions().createNormal(1, 5).sample(10);
    //Nd4j.getDistributions().createBinomial(5, 1.0).sample(new long[]{5, 5});
    //Nd4j.getDistributions().createBinomial(1, Nd4j.ones(5, 5)).sample(rand.shape());
    Nd4j.getDistributions().createNormal(rand, 1).sample(rand.shape());
}
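
Beyond randn, Nd4j.getDistributions() exposes a distribution factory: createUniform(min, max) and createNormal(mean, std) return Distribution objects whose sample(...) methods accept either a sample count or a target shape, as the calls above show.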
 
Example 9
Source File: PCA.java    From nd4j with Apache License 2.0
/**
 * Generates a set of <i>count</i> random samples with the same variance and mean and eigenvector/values
 * as the data set used to initialize the PCA object, with same number of features <i>N</i>.
 * @param count The number of samples to generate
 * @return A matrix of size <i>count</i> rows by <i>N</i> columns
 */
public INDArray generateGaussianSamples(long count) {
    INDArray samples = Nd4j.randn(new long[] {count, eigenvalues.columns()});
    INDArray factors = Transforms.pow(eigenvalues, -0.5, true);
    samples.muliRowVector(factors);
    return Nd4j.tensorMmul(eigenvectors, samples, new int[][] {{1}, {1}}).transposei().addiRowVector(mean);
}
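
Reading the code off directly: with z a row of standard-normal draws, each generated sample is x = V * (z / sqrt(lambda)) + mean, i.e. elementwise scaling by lambda^-0.5 as written, then a change of basis through the eigenvectors V, then the mean offset. A usage sketch (array sizes are illustrative; Example 15 below runs the same calls on real test data):

INDArray data = Nd4j.randn(10000, 16);                  // stand-in for a real dataset
PCA pca = new PCA(data);                                // fits mean, covariance and eigen-decomposition
INDArray synthetic = pca.generateGaussianSamples(500);  // 500 rows x 16 columns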
 
Example 10
Source File: RandomTests.java    From nd4j with Apache License 2.0
@Test
public void testSignatures1() throws Exception {

    for (int x = 0; x < 100; x++) {
        INDArray z1 = Nd4j.randn(128, 1, 5325235);
        INDArray z2 = Nd4j.randn(128, 1, 5325235);

        assertEquals(z1, z2);
    }
}
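
The third argument to randn here is a fixed seed: both calls draw from identically seeded generators, so the two 128 x 1 arrays are expected to be equal on every iteration, which is exactly what the assertEquals checks.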
 
Example 11
Source File: BinarySerdeTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testReadShapeFile() throws Exception {
    File tmpFile = new File(System.getProperty("java.io.tmpdir"),
                    "ndarraytmp-" + UUID.randomUUID().toString() + " .bin");
    tmpFile.deleteOnExit();
    INDArray rand = Nd4j.randn(5, 5);
    BinarySerde.writeArrayToDisk(rand, tmpFile);
    DataBuffer buffer = BinarySerde.readShapeFromDisk(tmpFile);

    assertArrayEquals(rand.shapeInfoDataBuffer().asLong(), buffer.asLong());
}
 
Example 12
Source File: BinarySerdeTest.java    From nd4j with Apache License 2.0
@Test
public void testReadWriteFile() throws Exception {
    File tmpFile = new File(System.getProperty("java.io.tmpdir"),
                    "ndarraytmp-" + UUID.randomUUID().toString() + " .bin");
    tmpFile.deleteOnExit();
    INDArray rand = Nd4j.randn(5, 5);
    BinarySerde.writeArrayToDisk(rand, tmpFile);
    INDArray fromDisk = BinarySerde.readFromDisk(tmpFile);
    assertEquals(rand, fromDisk);
}
 
Example 13
Source File: WeightInitUtilTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testXavierLegacy() {
    INDArray params = Nd4j.create(shape, 'f');
    INDArray weightsActual =
                    WeightInitUtil.initWeights(fanIn, fanOut, shape, WeightInit.XAVIER_LEGACY, dist, params);

    // expected calculation
    Nd4j.getRandom().setSeed(123);
    INDArray weightsExpected = Nd4j.randn('f', shape);
    weightsExpected.muli(FastMath.sqrt(1.0 / (fanIn + fanOut)));

    assertEquals(weightsExpected, weightsActual);
}
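
The expected side spells out what XAVIER_LEGACY does: draw weights from N(0, 1) in the requested order, then scale by sqrt(1 / (fanIn + fanOut)), i.e. W ~ N(0, 1 / (fanIn + fanOut)).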
 
Example 14
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testSpaceToBatch() {
    Nd4j.getRandom().setSeed(7331);

    int miniBatch = 4;
    int[] inputShape = new int[]{1, 2, 2, 1};

    int M = 2;
    int[] blockShape = new int[]{M, 1};
    int[] paddingShape = new int[]{M, 2};

    INDArray input = Nd4j.randn(inputShape);
    INDArray blocks = Nd4j.create(new float[]{2, 2}, blockShape);
    INDArray padding = Nd4j.create(new float[]{0, 0, 0, 0}, paddingShape);

    SameDiff sd = SameDiff.create();

    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 1, 1, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("space_to_batch")
            .addInputs(input, blocks, padding)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.spaceToBatch(sdInput, new int[]{2, 2}, new int[][]{{0, 0}, {0, 0}});
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("space to batch failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
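
Shape-wise: space_to_batch with 2 x 2 blocks and zero padding tiles the single 2 x 2 input into four 1 x 1 pieces along the batch dimension, so [1, 2, 2, 1] becomes [4, 1, 1, 1], matching the expOut array.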
 
Example 15
Source File: TestPCA.java    From deeplearning4j with Apache License 2.0
/**
 * Test new PCA routines, added by Luke Czapla
 */
@Test
public void testPCA() {
    INDArray m = Nd4j.randn(10000, 16);
    // 10000 random correlated samples of 16 features to analyze
    m.getColumn(0).muli(4.84);
    m.getColumn(1).muli(4.84);
    m.getColumn(2).muli(4.09);
    m.getColumn(1).addi(m.getColumn(2).div(2.0));
    m.getColumn(2).addi(34.286);
    m.getColumn(1).addi(m.getColumn(4));
    m.getColumn(4).subi(m.getColumn(5).div(2.0));
    m.getColumn(5).addi(3.4);
    m.getColumn(6).muli(6.0);
    m.getColumn(7).muli(0.2);
    m.getColumn(8).muli(2.0);
    m.getColumn(9).muli(6.0);
    m.getColumn(9).addi(m.getColumn(6).mul(1.0));
    m.getColumn(10).muli(0.2);
    m.getColumn(11).muli(2.0);
    m.getColumn(12).muli(0.2);
    m.getColumn(13).muli(4.0);
    m.getColumn(14).muli(3.2);
    m.getColumn(14).addi(m.getColumn(2).mul(1.0)).subi(m.getColumn(13).div(2.0));
    m.getColumn(15).muli(1.0);
    m.getColumn(13).subi(12.0);
    m.getColumn(15).addi(30.0);

    PCA myPCA = new PCA(m);
    INDArray reduced70 = myPCA.reducedBasis(0.70);
    INDArray reduced99 = myPCA.reducedBasis(0.99);
    assertTrue("Major variance differences should change number of basis vectors",
                    reduced99.columns() > reduced70.columns());
    INDArray reduced100 = myPCA.reducedBasis(1.0);
    assertTrue("100% variance coverage should include all eigenvectors", reduced100.columns() == m.columns());
    NDArrayStrings ns = new NDArrayStrings(5);
//    System.out.println("Eigenvectors:\n" + ns.format(myPCA.getEigenvectors()));
//    System.out.println("Eigenvalues:\n" + ns.format(myPCA.getEigenvalues()));
    double variance = 0.0;

    // sample 1000 of the randomly generated samples with the reduced basis set
    for (long i = 0; i < 1000; i++)
        variance += myPCA.estimateVariance(m.getRow(i), reduced70.columns());
    variance /= 1000.0;
    System.out.println("Fraction of variance using 70% variance with " + reduced70.columns() + " columns: " + variance);
    assertTrue("Variance does not cover intended 70% variance", variance > 0.70);
    // create "dummy" data with the same exact trends
    INDArray testSample = myPCA.generateGaussianSamples(10000);
    PCA analyzePCA = new PCA(testSample);
    assertTrue("Means do not agree accurately enough",
                    myPCA.getMean().equalsWithEps(analyzePCA.getMean(), 0.2 * myPCA.getMean().columns()));
    assertTrue("Covariance is not reproduced accurately enough", myPCA.getCovarianceMatrix().equalsWithEps(
                    analyzePCA.getCovarianceMatrix(), 1.0 * analyzePCA.getCovarianceMatrix().length()));
    assertTrue("Eigenvalues are not close enough", myPCA.getEigenvalues().equalsWithEps(analyzePCA.getEigenvalues(),
                    0.5 * myPCA.getEigenvalues().columns()));
    assertTrue("Eigenvectors are not close enough", myPCA.getEigenvectors()
                    .equalsWithEps(analyzePCA.getEigenvectors(), 0.1 * analyzePCA.getEigenvectors().length()));
//    System.out.println("Original cov:\n" + ns.format(myPCA.getCovarianceMatrix()) + "\nDummy cov:\n"
//                    + ns.format(analyzePCA.getCovarianceMatrix()));
    INDArray testSample2 = analyzePCA.convertBackToFeatures(analyzePCA.convertToComponents(testSample));
    assertTrue("Transformation does not work.", testSample.equalsWithEps(testSample2, 1e-5 * testSample.length()));
}
 
Example 16
Source File: SameDiffTests.java    From nd4j with Apache License 2.0
@Test
public void testBooleanChecks() {
    /*
    isNonDecreasing, isStrictlyIncreasing, isNumericTensor
     */
    Nd4j.getRandom().setSeed(12345);

    for (int i = 0; i < 3; i++) {
        SameDiff sd = SameDiff.create();

        int nOut = 4;
        int minibatch = 5;

        INDArray ia = Nd4j.randn(minibatch, nOut);

        SDVariable in1 = sd.var("in1", ia);
        INDArray expOut = Nd4j.create(new float[]{1});
        SDVariable t;

        switch (i) {
            case 0:
                t = sd.isNonDecreasing(in1);
                Nd4j.getExecutioner().exec(new IsNonDecreasing(new INDArray[]{ia}, new INDArray[]{expOut}));
                break;
            case 1:
                t = sd.isStrictlyIncreasing(in1);
                Nd4j.getExecutioner().exec(new IsStrictlyIncreasing(new INDArray[]{ia}, new INDArray[]{expOut}));
                break;
            case 2:
                t = sd.isNumericTensor(in1);
                Nd4j.getExecutioner().exec(new IsNumericTensor(new INDArray[]{ia}, new INDArray[]{expOut}));
                break;
            default:
                throw new RuntimeException();
        }

        log.info("Executing: " + i);
        INDArray out = sd.execAndEndResult();

        assertEquals(expOut, out);
    }
}
 
Example 17
Source File: SameDiffTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testPairwiseBooleanTransforms() {
    /*
    eq, neq, gt, lt, gte, lte, or, max, min, and, xor
     */
    //Test transforms (pairwise)
    Nd4j.getRandom().setSeed(12345);

    for (int i = 0; i < 11; i++) {
        SameDiff sd = SameDiff.create();

        int nOut = 4;
        int minibatch = 5;

        INDArray ia = Nd4j.randn(minibatch, nOut);
        INDArray ib = Nd4j.randn(minibatch, nOut);

        SDVariable in1 = sd.var("in1", ia);
        SDVariable in2 = sd.var("in2", ib);

        SDVariable t;
        INDArray expOut;
        switch (i) {
            case 0:
                t = sd.eq(in1, in2);
                expOut = ia.eq(ib);
                break;
            case 1:
                t = sd.neq(in1, in2);
                expOut = ia.neq(ib);
                break;
            case 2:
                t = sd.gt(in1, in2);
                expOut = ia.gt(ib);
                break;
            case 3:
                t = sd.lt(in1, in2);
                expOut = ia.lt(ib);
                break;
            case 4:
                t = sd.gte(in1, in2);
                expOut = Nd4j.create(DataType.BOOL, ia.shape());
                Nd4j.exec(new GreaterThanOrEqual(new INDArray[]{ia, ib}, new INDArray[]{expOut}));
                break;
            case 5:
                t = sd.lte(in1, in2);
                expOut = Nd4j.create(DataType.BOOL, ia.shape());
                Nd4j.exec(new LessThanOrEqual(new INDArray[]{ia, ib}, new INDArray[]{expOut}));
                break;
            case 6:
                ia = Nd4j.exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.exec(new BernoulliDistribution(ib, 0.5));
                t = sd.math().or(in1.castTo(DataType.BOOL), in2.castTo(DataType.BOOL));
                expOut = Transforms.or(ia, ib);
                break;
            case 7:
                t = sd.max(in1, in2);
                expOut = Nd4j.exec(new Max(ia, ib, ia.dup()))[0];
                break;
            case 8:
                t = sd.min(in1, in2);
                expOut = Nd4j.exec(new Min(ia, ib, ia.dup()))[0];
                break;
            case 9:
                ia = Nd4j.exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.exec(new BernoulliDistribution(ib, 0.5));
                t = sd.math().and(in1.castTo(DataType.BOOL), in2.castTo(DataType.BOOL));
                expOut = Transforms.and(ia, ib);
                break;
            case 10:
                ia = Nd4j.exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.exec(new BernoulliDistribution(ib, 0.5));
                t = sd.math().xor(in1.castTo(DataType.BOOL), in2.castTo(DataType.BOOL));
                expOut = Transforms.xor(ia, ib);
                break;
            default:
                throw new RuntimeException();
        }

        log.info("Executing: " + i);
        INDArray out = t.eval();

        assertEquals(expOut, out);
    }
}
 
Example 18
Source File: SameDiffTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testBooleanChecks() {
    /*
    isNonDecreasing, isStrictlyIncreasing, isNumericTensor
     */
    Nd4j.getRandom().setSeed(12345);

    for (int i = 0; i < 3; i++) {
        SameDiff sd = SameDiff.create();

        int nOut = 4;
        int minibatch = 5;

        INDArray ia = Nd4j.randn(minibatch, nOut);

        SDVariable in1 = sd.var("in1", ia);
        INDArray expOut = Nd4j.scalar(true);
        SDVariable t;

        switch (i) {
            case 0:
                t = sd.math().isNonDecreasing(in1);
                Nd4j.exec(new IsNonDecreasing(ia, expOut));
                break;
            case 1:
                t = sd.math().isStrictlyIncreasing(in1);
                Nd4j.exec(new IsStrictlyIncreasing(ia, expOut));
                break;
            case 2:
                t = sd.isNumericTensor(in1);
                Nd4j.exec(new IsNumericTensor(new INDArray[]{ia}, new INDArray[]{expOut}));
                break;
            default:
                throw new RuntimeException();
        }

        log.info("Executing: " + i);
        INDArray out = t.eval();

        assertEquals(expOut, out);
    }
}
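
Compare with Example 16, which is the older nd4j version of the same test: there the boolean result was modeled as a float array (Nd4j.create(new float[]{1})) and run through sd.execAndEndResult(), whereas this deeplearning4j version uses a proper boolean scalar (Nd4j.scalar(true)), the sd.math() namespace, and t.eval().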
 
Example 19
Source File: Gan11Exemple.java    From dl4j-tutorials with MIT License
private static void run1() {
    Gan11Exemple exemple = new Gan11Exemple();
    exemple.init(28, 28, 1);
    exemple.network();

    INDArray nInput = Nd4j.randn(new int[]{1, noiseSize});

    INDArray realPicInput = getRealPic(true).div(255);
    for (int i = 0; i < 10000; i++) {
        System.out.println();
        Map<String, INDArray> param1 = exemple.copy(exemple.netParams());
        exemple.trainRealDisc(realPicInput, i);
        Map<String, INDArray> param2 = exemple.copy(exemple.netParams());
//        System.out.println("param(g)=" + exemple.eq(param1, param2, true));
        System.out.println("param(d)=" + exemple.eq(param1, param2, false));
        System.out.println("disc true ret=" + exemple.discTruePridict(realPicInput));

        System.out.println("disc false ret(before)=" + exemple.discFlasePridict(nInput));
        exemple.trainBadDisc(nInput, i);

        Map<String, INDArray> param4 = exemple.copy(exemple.netParams());
//        System.out.println("param(g)=" + exemple.eq(param3, param4, true));
        System.out.println("param(d)=" + exemple.eq(param2, param4, false));
        System.out.println("disc false ret(after)=" + exemple.discFlasePridict(nInput));

        System.out.println("gen ret(before)=" + exemple.genPridict(nInput));
        GanCnnInputPreProcessor.save = true;
        exemple.trainGen(nInput, i);
        GanCnnInputPreProcessor.save = false;
        Map<String, INDArray> param3 = exemple.copy(exemple.netParams());
        System.out.println("param(g)=" + exemple.eq(param4, param3, true));
        System.out.println("param(d)=" + exemple.eq(param4, param3, false));
        System.out.println("gen ret(after)=" + exemple.genPridict(nInput));

        System.out.println();
        if (i % 100 == 0) {
            exemple.saveModel("/myself/data/code/ideajava/dl4j-myexemples/model/Model_Gan11." + i + ".zip");
        }
    }
}
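
Here Nd4j.randn supplies the generator's latent noise vector nInput, and the loop follows the usual GAN schedule: train the discriminator on a real batch, then on generator output from the fixed noise vector, then train the generator, comparing parameter snapshots before and after each step to confirm which sub-network actually changed.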
 
Example 20
Source File: Gan4Exemple.java    From dl4j-tutorials with MIT License
private static INDArray getNorseInputData() {
    Random rd = new Random();
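    // note: rd is unused here; randn(int[] shape, long seed) draws from its own generator seeded with 1234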
    return Nd4j.randn(new int[]{1, 10}, 1234);
}