org.nd4j.linalg.factory.Nd4j Java Examples

The following examples show how to use org.nd4j.linalg.factory.Nd4j. They are drawn from the deeplearning4j and nd4j projects; the source file and license are noted above each example.
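As a quick orientation before the examples, here is a minimal sketch of typical Nd4j factory usage (array creation plus an element-wise operation); the printed values assume the default floating-point data type:

INDArray ones = Nd4j.ones(3, 3);                        // 3x3 array filled with 1.0
INDArray range = Nd4j.linspace(1, 9, 9).reshape(3, 3);  // values 1..9 reshaped to 3x3
INDArray sum = ones.add(range);                         // element-wise add; returns a new array
System.out.println(sum);                                // 2.0 through 10.0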
Example #1
Source File: SameDiffTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testScatterDiv() {
    INDArray arr1 = Nd4j.ones(3, 3);
    INDArray arr2 = Nd4j.createFromArray(0, 1);
    INDArray arr3 = Nd4j.ones(2, 3).assign(2);
    INDArray expected = Nd4j.create(new float[]{0.5f, 0.5f, 0.5f,
                    0.5f, 0.5f, 0.5f,
                    1.0f, 1.0f, 1.0f},
            new long[]{3, 3}).castTo(Nd4j.defaultFloatingPointType());

    SameDiff sd = SameDiff.create();
    SDVariable refs = sd.var("refs", arr1);
    SDVariable idxs = sd.constant("idxs", arr2);
    SDVariable upds = sd.placeHolder("upds", arr3.dataType(), arr3.shape());
    upds.setArray(arr3);

    SDVariable result = sd.scatterDiv(refs, idxs, upds);
    assertArrayEquals(new long[]{3, 3}, result.eval().shape());
    assertEquals(expected, result.eval());
}
 
Example #2
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testNormMax() {

    SameDiff sameDiff = SameDiff.create();

    INDArray in = Nd4j.linspace(1, 12, 12).reshape(3, 4);
    SDVariable input = sameDiff.var(in);
    INDArray expected = Nd4j.createFromArray(new double[]{
            9.0000,   10.0000,   11.0000,   12.0000
    });

    SDVariable output = new NormMax(sameDiff, input, false, new int[]{0}).outputVariable();

    TestCase tc = new TestCase(sameDiff)
            .gradientCheck(true)
            .expectedOutput(output.name(), expected);

    String err = OpValidation.validate(tc);
    assertNull(err);
}
 
Example #3
Source File: CheckUtil.java    From nd4j with Apache License 2.0
public static boolean checkDivManually(INDArray first, INDArray second, double maxRelativeDifference,
                double minAbsDifference) {
    //No apache commons element-wise division, but can do this manually

    INDArray result = first.div(second);
    long[] shape = first.shape();

    INDArray expected = Nd4j.zeros(first.shape());

    for (int i = 0; i < shape[0]; i++) {
        for (int j = 0; j < shape[1]; j++) {
            double v = first.getDouble(i, j) / second.getDouble(i, j);
            expected.putScalar(new int[] {i, j}, v);
        }
    }
    if (!checkShape(expected, result))
        return false;
    boolean ok = checkEntries(expected, result, maxRelativeDifference, minAbsDifference);
    if (!ok) {
        INDArray onCopies = Shape.toOffsetZeroCopy(first).div(Shape.toOffsetZeroCopy(second)); // recompute the division on offset-zero copies for the failure report
        printFailureDetails(first, second, expected, result, onCopies, "div");
    }
    return ok;
}
 
Example #4
Source File: ShufflesTests.java    From nd4j with Apache License 2.0
public boolean compareColumn(INDArray newData) {
    float[] newMap = measureState(newData);

    if (newMap.length != map.length) {
        System.out.println("Different map lengths");
        return false;
    }

    if (Arrays.equals(map, newMap)) {
        System.out.println("Maps are equal");
        return false;
    }

    for (int x = 0; x < newData.columns(); x++) { // check that each column holds a single repeated value
        INDArray column = newData.getColumn(x);
        double val = column.getDouble(0);
        for (int y = 0; y < column.lengthLong(); y++) {
            if (Math.abs(column.getFloat(y) - val) > Nd4j.EPS_THRESHOLD) {
                System.out.println("Different data in a column: " + column.getFloat(y));
                return false;
            }
        }
    }

    return true;
}
 
Example #5
Source File: PreProcessor3D4DTest.java    From nd4j with Apache License 2.0
public Construct4dDataSet(int nExamples, int nChannels, int height, int width) {

    INDArray allImages = Nd4j.rand(new int[] {nExamples, nChannels, height, width});
    allImages.get(NDArrayIndex.all(), NDArrayIndex.point(1), NDArrayIndex.all(), NDArrayIndex.all())
            .muli(100).addi(200);
    allImages.get(NDArrayIndex.all(), NDArrayIndex.point(2), NDArrayIndex.all(), NDArrayIndex.all())
            .muli(0.001).subi(10);

    INDArray labels = Nd4j.linspace(1, nChannels, nChannels).reshape(nChannels, 1);
    sampleDataSet = new DataSet(allImages, labels);

    expectedMean = allImages.mean(0, 2, 3);
    expectedStd = allImages.std(0, 2, 3);

    expectedLabelMean = labels.mean(0);
    expectedLabelStd = labels.std(0);

    expectedMin = allImages.min(0, 2, 3);
    expectedMax = allImages.max(0, 2, 3);
}
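Note the reduction dimensions {0, 2, 3} above: they average over examples, height, and width while keeping the channel dimension, so expectedMean, expectedStd, expectedMin, and expectedMax each hold one value per channel, which is exactly the per-channel statistic an image normalizer is expected to reproduce.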
 
Example #6
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testDepthToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    int[] inputShape = new int[]{miniBatch, 2, 2, blockSize * blockSize};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = new DepthToSpace(input, expOut, blockSize, DataFormat.NHWC);
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.cnn().depthToSpace("dts", sdInput, blockSize, DataFormat.NHWC);
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("dts", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
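Worked through for the shapes above: with NHWC input [128, 2, 2, 16] and blockSize = 4, depth-to-space moves each group of blockSize * blockSize = 16 channel values into a 4x4 spatial block, so the output shape is [128, 2*4, 2*4, 16/16] = [128, 8, 8, 1], matching the expOut array the test allocates.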
 
Example #7
Source File: AggregatesTests.java    From nd4j with Apache License 2.0
@Test
public void testBatchedAggregate1() throws Exception {
    INDArray arrayX1 = Nd4j.ones(10);
    INDArray arrayY1 = Nd4j.zeros(10);

    INDArray arrayX2 = Nd4j.ones(10);
    INDArray arrayY2 = Nd4j.zeros(10);

    INDArray exp1 = Nd4j.create(10).assign(1f);
    INDArray exp2 = Nd4j.create(10).assign(1f);

    AggregateAxpy axpy1 = new AggregateAxpy(arrayX1, arrayY1, 1.0f);
    AggregateAxpy axpy2 = new AggregateAxpy(arrayX2, arrayY2, 1.0f);

    List<Aggregate> batch = new ArrayList<>();
    batch.add(axpy1);
    batch.add(axpy2);

    Nd4j.getExecutioner().exec(batch);

    assertEquals(exp1, arrayY1);
    assertEquals(exp2, arrayY2);
}
 
Example #8
Source File: BackTrackLineSearchTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSingleMaxLineSearch() throws Exception {
    double score1, score2;

    OutputLayer layer = getIrisLogisticLayerConfig(Activation.SOFTMAX, 100,
                    LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD);
    int nParams = (int)layer.numParams();
    layer.setBackpropGradientsViewArray(Nd4j.create(1, nParams));
    layer.setInput(irisData.getFeatures(), LayerWorkspaceMgr.noWorkspaces());
    layer.setLabels(irisData.getLabels());
    layer.computeGradientAndScore(LayerWorkspaceMgr.noWorkspaces());
    score1 = layer.score();

    BackTrackLineSearch lineSearch =
                    new BackTrackLineSearch(layer, new NegativeDefaultStepFunction(), layer.getOptimizer());
    double step = lineSearch.optimize(layer.params(), layer.gradient().gradient(), layer.gradient().gradient(), LayerWorkspaceMgr.noWorkspacesImmutable());

    assertEquals(1.0, step, 1e-3);
}
 
Example #9
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testILossFunctionGetsSet() {
    ILossFunction lossFunction = new LossMCXENT(Nd4j.create(new float[] {1f, 2f}, new long[]{1,2}));

    MultiLayerConfiguration expected =
                    new NeuralNetConfiguration.Builder().updater(new Sgd(0.005)).seed(12345).list()
                                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                                    .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build()).layer(2,
                                                    new OutputLayer.Builder().lossFunction(lossFunction)
                                                            .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                                    .build();

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
                    .addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build(), new FixedValue<>(2)) //2 identical layers
                    .addLayer(new OutputLayerSpace.Builder().iLossFunction(lossFunction).activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration conf = mls.getValue(new double[0]).getMultiLayerConfiguration();

    assertEquals(expected, conf);
}
 
Example #10
Source File: BaseLevel2.java    From deeplearning4j with Apache License 2.0
/**
 * spr performs a rank-1 update of an n-by-n packed symmetric matrix a:
 * a := alpha*x*x' + a.
 *
 * @param order storage order ('c' or 'f')
 * @param Uplo  'U' if the upper triangle of a is stored, 'L' for the lower
 * @param alpha scalar multiplier for the rank-1 update
 * @param X     the n-element vector x
 * @param Ap    the packed triangular entries of a, updated in place
 */
@Override
public void spr(char order, char Uplo, double alpha, INDArray X, INDArray Ap) {
    if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL)
        OpProfiler.getInstance().processBlasCall(false, Ap, X);


    if (X.length() > Integer.MAX_VALUE)
        throw new ND4JArraySizeException();

    if (X.data().dataType() == DataType.DOUBLE) {
        DefaultOpExecutioner.validateDataType(DataType.DOUBLE, X);
        dspr(order, Uplo, (int) X.length(), alpha, X, X.stride(-1), Ap);
    } else {
        DefaultOpExecutioner.validateDataType(DataType.FLOAT, X);
        sspr(order, Uplo, (int) X.length(), (float) alpha, X, X.stride(-1), Ap);
    }

    OpExecutionerUtil.checkForAny(Ap);
}
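A hedged usage sketch for the method above, assuming the standard Nd4j.getBlasWrapper().level2() accessor to reach Level-2 BLAS routines; for an n-element vector x, Ap must hold the n*(n+1)/2 packed entries of one triangle of a:

int n = 4;
INDArray x = Nd4j.linspace(1, n, n);          // the vector x
INDArray ap = Nd4j.zeros(n * (n + 1) / 2);    // packed storage for one triangle of a
// a := 1.0 * x * x' + a, using the upper triangle, fortran (column-major) order
Nd4j.getBlasWrapper().level2().spr('f', 'U', 1.0, x, ap);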
 
Example #11
Source File: BasicWorkspaceTests.java    From nd4j with Apache License 2.0
@Test
public void testMmap2() throws Exception {
    // we don't support MMAP on cuda yet
    if (Nd4j.getExecutioner().getClass().getName().toLowerCase().contains("cuda"))
        return;

    File tmp = File.createTempFile("tmp", "fdsfdf");
    tmp.deleteOnExit();
    Nd4jWorkspace.fillFile(tmp, 100000);

    WorkspaceConfiguration mmap = WorkspaceConfiguration.builder()
            .policyLocation(LocationPolicy.MMAP)
            .tempFilePath(tmp.getAbsolutePath())
            .build();

    MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(mmap, "M3");

    INDArray mArray = Nd4j.create(100);
    mArray.assign(10f);

    assertEquals(1000f, mArray.sumNumber().floatValue(), 1e-5);

    ws.notifyScopeLeft();
}
 
Example #12
Source File: BaseComplexNDArray.java    From nd4j with Apache License 2.0
/**
 * In-place epsilon-equality test: sets each element to 1 if its real
 * component is within Nd4j.EPS_THRESHOLD of the given number, 0 otherwise.
 *
 * @param other the number to compare against
 * @return this ndarray, overwritten with the binary comparison results
 */
@Override
public IComplexNDArray epsi(IComplexNumber other) {
    IComplexNDArray linear = linearView();
    double otherVal = other.realComponent().doubleValue();
    for (int i = 0; i < linearView().length(); i++) {
        IComplexNumber n = linear.getComplex(i);
        double real = n.realComponent().doubleValue();
        double diff = Math.abs(real - otherVal);
        if (diff <= Nd4j.EPS_THRESHOLD)
            linear.putScalar(i, Nd4j.createDouble(1, 0));
        else
            linear.putScalar(i, Nd4j.createDouble(0, 0));
    }

    return this;
}
 
Example #13
Source File: FailingSameDiffTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testExecutionDifferentShapesTransform(){
    OpValidationSuite.ignoreFailing();
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var("in", Nd4j.linspace(1,12,12, DataType.DOUBLE).reshape(3,4));

    SDVariable tanh = sd.math().tanh(in);
    INDArray exp = Transforms.tanh(in.getArr(), true);

    INDArray out = tanh.eval();
    assertEquals(exp, out);

    //Now, replace with minibatch 5:
    in.setArray(Nd4j.linspace(1,20,20, DataType.DOUBLE).reshape(5,4));
    INDArray out2 = tanh.eval();
    assertArrayEquals(new long[]{5,4}, out2.shape());

    exp = Transforms.tanh(in.getArr(), true);
    assertEquals(exp, out2);
}
 
Example #14
Source File: CyclicWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testBasicMechanics_1() {
    val fShape = new long[]{128, 784};
    val lShape = new long[]{128, 10};
    val prefetchSize = 24;
    val configuration = WorkspaceConfiguration.builder().minSize(10 * 1024L * 1024L)
            .overallocationLimit(prefetchSize + 1).policyReset(ResetPolicy.ENDOFBUFFER_REACHED)
            .policyLearning(LearningPolicy.FIRST_LOOP).policyAllocation(AllocationPolicy.OVERALLOCATE)
            .policySpill(SpillPolicy.REALLOCATE).build();

    for (int e = 0; e < 100; e++) {
        try (val ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(configuration, "randomNameHere" + 119)) {
            val fArray = Nd4j.create(fShape).assign(e);
            val lArray = Nd4j.create(lShape).assign(e);

//            log.info("Current offset: {}; Current size: {};", ws.getCurrentOffset(), ws.getCurrentSize());
        }
    }
}
 
Example #15
Source File: Float16.java    From nd4j with Apache License 2.0
@Override
protected CompressedDataBuffer compressPointer(DataBuffer.TypeEx srcType, Pointer srcPointer, int length,
                int elementSize) {

    BytePointer ptr = new BytePointer(length * 2);
    CompressionDescriptor descriptor = new CompressionDescriptor();
    descriptor.setCompressedLength(length * 2);
    descriptor.setOriginalLength(length * elementSize);
    descriptor.setOriginalElementSize(elementSize);
    descriptor.setNumberOfElements(length);

    descriptor.setCompressionAlgorithm(getDescriptor());
    descriptor.setCompressionType(getCompressionType());

    CompressedDataBuffer buffer = new CompressedDataBuffer(ptr, descriptor);

    Nd4j.getNDArrayFactory().convertDataEx(srcType, srcPointer, DataBuffer.TypeEx.FLOAT16, ptr, length);

    return buffer;
}
 
Example #16
Source File: MinMaxSerializerStrategy.java    From deeplearning4j with Apache License 2.0
@Override
public NormalizerMinMaxScaler restore(@NonNull InputStream stream) throws IOException {
    DataInputStream dis = new DataInputStream(stream);

    boolean fitLabels = dis.readBoolean();
    double targetMin = dis.readDouble();
    double targetMax = dis.readDouble();

    NormalizerMinMaxScaler result = new NormalizerMinMaxScaler(targetMin, targetMax);
    result.fitLabel(fitLabels);
    result.setFeatureStats(Nd4j.read(dis), Nd4j.read(dis));
    if (fitLabels) {
        result.setLabelStats(Nd4j.read(dis), Nd4j.read(dis));
    }

    return result;
}
 
Example #17
Source File: ConvolutionUtils.java    From deeplearning4j with Apache License 2.0
/**
 * Given a mask array for a 1D CNN layer of shape [minibatch, sequenceLength], reduce the mask according to the 1D CNN layer configuration.
 * Unlike RNN layers, 1D CNN layers may down-sample the data; consequently, we need to down-sample the mask array
 * in the same way, to maintain the correspondence between the masks and the output activations
 *
 * @param in       Input mask array, of shape [minibatch, sequenceLength]
 * @param kernel   Kernel size
 * @param stride   Stride
 * @param padding  Padding
 * @param dilation Dilation
 * @param cm       Convolution mode
 * @return Reduced mask
 */
public static INDArray cnn1dMaskReduction(INDArray in, int kernel, int stride, int padding, int dilation, ConvolutionMode cm){
    Preconditions.checkState(in.rank()==2, "Rank must be 2 for cnn1d mask array - shape ", in.shape());
    if((cm == ConvolutionMode.Same || cm == ConvolutionMode.Causal) && stride == 1 ){
        return in;
    }

    if(!Shape.hasDefaultStridesForShape(in)){
        in = in.dup();
    }

    INDArray reshaped4d = in.reshape(in.size(0), 1, in.size(1), 1);

    int[] outSize;
    int[] pad = null;
    int[] k = new int[]{kernel,1};
    int[] s = new int[]{stride, 1};
    int[] d = new int[]{dilation, 1};
    if (cm == ConvolutionMode.Same || cm == ConvolutionMode.Causal) {
        outSize = ConvolutionUtils.getOutputSize(reshaped4d, k, s, null, cm, d, CNN2DFormat.NCHW); //Also performs validation
    } else {
        pad = new int[]{padding, 0};
        outSize = ConvolutionUtils.getOutputSize(reshaped4d, k, s, pad, cm, d, CNN2DFormat.NCHW); //Also performs validation
    }
    int outH = outSize[0];

    INDArray output = Nd4j.createUninitialized(new int[]{(int)in.size(0), 1, outH, 1}, 'c');

    DynamicCustomOp op = new MaxPooling2D(reshaped4d, output, Pooling2DConfig.builder()
            .kH(k[0]).kW(k[1])
            .sH(s[0]).sW(s[1])
            .pH(pad == null ? 0 : pad[0]).pW(pad == null ? 0 : pad[1])
            .dH(d[0]).dW(d[1])
            .isSameMode(cm == ConvolutionMode.Same || cm == ConvolutionMode.Causal)
            .isNHWC(false)
            .build());

    Nd4j.getExecutioner().exec(op);
    return output.reshape('c', in.size(0), outH);
}
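For the Strict/Truncate branch, getOutputSize follows the usual convolution arithmetic; the hypothetical standalone helper below sketches that formula (in Same/Causal mode the output length is instead ceil(inLength / stride), which is why stride == 1 lets the method return the mask unchanged):

static int conv1dOutputLength(int inLength, int kernel, int stride, int padding, int dilation) {
    int effectiveKernel = (kernel - 1) * dilation + 1;               // dilation spreads the kernel taps
    return (inLength + 2 * padding - effectiveKernel) / stride + 1;  // integer division truncates the tail
}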
 
Example #18
Source File: BaseNDArray.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray addi(Number n, INDArray result) {
    validateNumericalArray("addi", false);
    if (Double.isNaN(n.doubleValue()))
        n = Nd4j.EPS_THRESHOLD;

    Nd4j.getExecutioner().exec(new ScalarAdd(this, null, result, n));
    return result;
}
 
Example #19
Source File: HalfOpsTests.java    From nd4j with Apache License 2.0
@Test
public void testHasum1() throws Exception {
    INDArray array1 = Nd4j.create(new float[] {1.0f, -1.0f, 1.0f, -1.0f, -2.0f, 2.0f, -2.0f});

    double sum = Nd4j.getBlasWrapper().asum(array1);

    assertEquals(10.0f, sum, 0.01f);
}
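asum is the BLAS absolute-value sum, i.e. the sum of |x_i|; for the array above that is 1 + 1 + 1 + 1 + 2 + 2 + 2 = 10, which is what the assertion checks (with a loose 0.01 tolerance, since these are half-precision ops).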
 
Example #20
Source File: MixedDataTypesTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testBasicOps_5() {
    val arrayX = Nd4j.create(new int[]{1, 2, 3, 4}, new  long[]{4}, DataType.INT);

    val result = arrayX.meanNumber().floatValue();

    assertEquals(2.5f, result, 1e-5);
}
 
Example #21
Source File: AdaGrad.java    From nd4j with Apache License 2.0
public INDArray getGradient(INDArray gradient, int slice, int[] shape) {
    boolean historicalInitialized = false;
    INDArray sqrtHistory;

    if (this.historicalGradient == null) {
        this.historicalGradient = Nd4j.zeros(shape).add(epsilon);
        historicalInitialized = true;
    } else if (!this.historicalGradient.isVector()
                    && this.historicalGradient.slice(slice).length() != gradient.length())
        throw new IllegalArgumentException("Illegal gradient");

    if (historicalGradient.isVector())
        sqrtHistory = sqrt(historicalGradient);
    else
        sqrtHistory = !historicalInitialized ? sqrt(historicalGradient.slice(slice)) : historicalGradient;
    INDArray learningRates;
    try {
        learningRates = sqrtHistory.rdivi(learningRate);
    } catch (ArithmeticException ae) {
        learningRates = sqrtHistory.rdivi(learningRate + epsilon);
    }
    if (gradient.length() != learningRates.length())
        gradient.muli(learningRates.slice(slice));
    else
        gradient.muli(learningRates);

    this.historicalGradient.slice(slice).addi(gradient.mul(gradient));
    numIterations++;

    //ensure no zeros
    return gradient;
}
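A minimal standalone sketch of the AdaGrad rule the method applies (hypothetical plain-Java code, not the nd4j API; note that the method above adds the squared gradient to the history after scaling, a common variant of the same rule):

static void adagradStep(double[] g, double[] h, double learningRate, double epsilon) {
    for (int i = 0; i < g.length; i++) {
        h[i] += g[i] * g[i];                                 // accumulate squared-gradient history
        g[i] *= learningRate / (Math.sqrt(h[i]) + epsilon);  // scale the raw gradient into an update
    }
}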
 
Example #22
Source File: SpTree.java    From deeplearning4j with Apache License 2.0
/**
 * Compute edge forces using Barnes-Hut.
 *
 * @param rowP row pointers of the sparse (CSR-format) P matrix; must be a vector
 * @param colP column indices of the sparse P matrix
 * @param valP values of the sparse P matrix entries
 * @param N    the number of data points
 * @param posF the positive forces, accumulated into this array
 */
public void computeEdgeForces(INDArray rowP, INDArray colP, INDArray valP, int N, INDArray posF) {
    if (!rowP.isVector())
        throw new IllegalArgumentException("RowP must be a vector");

    // Loop over all edges in the graph
    // just execute native op
    Nd4j.exec(new BarnesEdgeForces(rowP, colP, valP, data, N, posF));

    /*
    INDArray buf = Nd4j.create(data.dataType(), this.D);
    double D;
    for (int n = 0; n < N; n++) {
        INDArray slice = data.slice(n);
        for (int i = rowP.getInt(n); i < rowP.getInt(n + 1); i++) {

            // Compute pairwise distance and Q-value
            slice.subi(data.slice(colP.getInt(i)), buf);

            D = 1.0 + Nd4j.getBlasWrapper().dot(buf, buf);
            D = valP.getDouble(i) / D;

            // Sum positive force
            posF.slice(n).addi(buf.muli(D));
        }
    }
    */
}
 
Example #23
Source File: OpExecutionerTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testStdev() {
    INDArray arr = Nd4j.create(new float[] {0.9296161f, 0.31637555f, 0.1839188f}, new int[] {1, 3}, ordering());
    double stdev = arr.stdNumber(true).doubleValue();


    val standardDeviation = new org.apache.commons.math3.stat.descriptive.moment.StandardDeviation(true);
    double exp = standardDeviation.evaluate(arr.toDoubleVector());
    assertEquals(exp, stdev, 1e-7f);


    double stdev2 = arr.std(true, 1).getDouble(0);
    assertEquals(stdev, stdev2, 1e-3);
}
 
Example #24
Source File: SlicingTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testSliceAssertion() {
    INDArray arr = Nd4j.linspace(1, 30, 30).reshape(3, 5, 2);
    INDArray firstRow = arr.slice(0).slice(0);
//    for (int i = 0; i < firstRow.length(); i++) {
//        System.out.println(firstRow.getDouble(i));
//    }
//    System.out.println(firstRow);
}
 
Example #25
Source File: TestEarlyStoppingSparkCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(2.0)) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
                    .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(5));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(2, TimeUnit.MINUTES),
                                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(saver).build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);

    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
                    esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
                    result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(7.5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
 
Example #26
Source File: EndlessTests.java    From nd4j with Apache License 2.0
@Test
public void testStdDevForeverFull(){
    INDArray arr = Nd4j.ones(100,100);

    for (int i = 0; i < RUN_LIMIT; i++ ) {
        arr.stdNumber();
    }
}
 
Example #27
Source File: SameDiffTests.java    From nd4j with Apache License 2.0
@Test
public void testAutoBroadcastAddMatrixVector() {
    SameDiff sameDiff = SameDiff.create();
    INDArray arr = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    INDArray row = Nd4j.ones(2);
    INDArray assertion = arr.add(1.0);
    SDVariable left = sameDiff.var("arr", arr);
    SDVariable right = sameDiff.var("row", row);
    SDVariable test = left.add(right);
    sameDiff.exec();
    assertEquals(assertion, test.getArr());
}
 
Example #28
Source File: TestVariableLengthTS.java    From deeplearning4j with Apache License 2.0
/**
 * CPU ONLY VERSION FOR TESTING
 */
public static INDArray reverseTimeSeries(INDArray in){
    if(in == null){
        return null;
    }
    INDArray out = Nd4j.createUninitialized(in.shape(), 'f');
    CustomOp op = DynamicCustomOp.builder("reverse")
            .addIntegerArguments(2)
            .addInputs(in)
            .addOutputs(out)
            .callInplace(false)
            .build();
    Nd4j.getExecutioner().exec(op);
    return out;
}
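A usage sketch, relying on deeplearning4j's time-series convention of [miniBatch, size, timeSeriesLength] arrays (which is why the op above reverses along dimension 2):

INDArray ts = Nd4j.linspace(1, 24, 24).reshape(2, 3, 4);  // [miniBatch=2, size=3, length=4]
INDArray reversed = reverseTimeSeries(ts);
// for each example b and feature f: reversed[b, f, t] == ts[b, f, 3 - t]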
 
Example #29
Source File: WorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testScalarOutputCase() {
    for (WorkspaceMode ws : WorkspaceMode.values()) {
        log.info("WorkspaceMode = " + ws);

        Nd4j.getRandom().setSeed(12345);
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .weightInit(WeightInit.XAVIER)
                .seed(12345)
                .trainingWorkspaceMode(ws).inferenceWorkspaceMode(ws)
                .list()
                .layer(new OutputLayer.Builder().nIn(3).nOut(1).activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.XENT).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        INDArray input = Nd4j.linspace(1, 3, 3, Nd4j.dataType()).reshape(1,3);
        INDArray out = net.output(input);
        INDArray out2 = net.output(input);

        assertEquals(out2, out);

        assertFalse(out.isAttached());
        assertFalse(out2.isAttached());

        Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
    }
}
 
Example #30
Source File: ShapeResolutionTestsC.java    From nd4j with Apache License 2.0
@Test
public void testVectorIndexPointPointOutOfRange() {
    INDArray zeros = Nd4j.zeros(1, 4);
    INDArrayIndex x = NDArrayIndex.point(0);
    INDArrayIndex y = NDArrayIndex.point(4);
    INDArray value = Nd4j.ones(1, 1);
    try {
        zeros.put(new INDArrayIndex[] {x, y}, value);
        fail("Out of range index should throw an IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        //do nothing
    }
}