Java Code Examples for org.nd4j.linalg.indexing.BooleanIndexing#and()

The following examples show how to use org.nd4j.linalg.indexing.BooleanIndexing#and(). Each example is taken from an open-source project; the source file and license are noted above it.
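As context for the examples below, here is a minimal, self-contained sketch of the method's contract (the class name is ours; it assumes a recent ND4J on the classpath): and() returns true only when every element of the array satisfies the given condition, which is why the tests below can use it to bound-check sampled values.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.BooleanIndexing;
import org.nd4j.linalg.indexing.conditions.Conditions;

public class BooleanIndexingAndSketch {
    public static void main(String[] args) {
        INDArray arr = Nd4j.create(new double[] {1.0, 2.0, 3.0});

        // and() is true only if EVERY element satisfies the condition
        System.out.println(BooleanIndexing.and(arr, Conditions.greaterThan(0.0))); // true
        System.out.println(BooleanIndexing.and(arr, Conditions.greaterThan(1.0))); // false: 1.0 fails
    }
}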
Example 1
Source File: RandomTests.java    From nd4j with Apache License 2.0
@Test
public void testBinomialDistribution1() throws Exception {
    Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119);
    Random random2 = Nd4j.getRandomFactory().getNewRandomInstance(119);

    INDArray z1 = Nd4j.zeros(1000);
    INDArray z2 = Nd4j.zeros(1000);
    INDArray z1Dup = Nd4j.zeros(1000);

    BinomialDistribution op1 = new BinomialDistribution(z1, 5, 0.25);
    BinomialDistribution op2 = new BinomialDistribution(z2, 5, 0.25);

    Nd4j.getExecutioner().exec(op1, random1);
    Nd4j.getExecutioner().exec(op2, random2);

    assertNotEquals(z1Dup, z1);

    assertEquals(z1, z2);

    // Sanity-check the samples: with n = 5 trials, every value must lie in [0, 5].
    // and() returns true only if all elements satisfy the condition, so assert on its result.
    assertTrue(BooleanIndexing.and(z1, Conditions.lessThanOrEqual(5.0)));
    assertTrue(BooleanIndexing.and(z1, Conditions.greaterThanOrEqual(0.0)));
}
 
Example 2
Source File: RandomTests.java    From nd4j with Apache License 2.0
@Test
public void testBinomialDistribution2() throws Exception {
    Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119);
    Random random2 = Nd4j.getRandomFactory().getNewRandomInstance(119);

    INDArray z1 = Nd4j.zeros(1000);
    INDArray z2 = Nd4j.zeros(1000);
    INDArray z1Dup = Nd4j.zeros(1000);

    INDArray probs = Nd4j.create(new float[] {0.25f, 0.43f, 0.55f, 0.43f, 0.25f});

    BinomialDistribution op1 = new BinomialDistribution(z1, 5, probs);
    BinomialDistribution op2 = new BinomialDistribution(z2, 5, probs);

    Nd4j.getExecutioner().exec(op1, random1);
    Nd4j.getExecutioner().exec(op2, random2);

    assertNotEquals(z1Dup, z1);

    assertEquals(z1, z2);

    // Assert the bound checks rather than discarding the boolean result of and()
    assertTrue(BooleanIndexing.and(z1, Conditions.lessThanOrEqual(5.0)));
    assertTrue(BooleanIndexing.and(z1, Conditions.greaterThanOrEqual(0.0)));
}
 
Example 3
Source File: RandomTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testBinomialDistribution1() {
    Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119);
    Random random2 = Nd4j.getRandomFactory().getNewRandomInstance(119);

    INDArray z1 = Nd4j.zeros(1000);
    INDArray z2 = Nd4j.zeros(1000);
    INDArray z1Dup = Nd4j.zeros(1000);

    BinomialDistribution op1 = new BinomialDistribution(z1, 5, 0.25);
    BinomialDistribution op2 = new BinomialDistribution(z2, 5, 0.25);

    Nd4j.getExecutioner().exec(op1, random1);
    Nd4j.getExecutioner().exec(op2, random2);

    assertNotEquals(z1Dup, z1);

    assertEquals(z1, z2);

    // Assert the bound checks rather than discarding the boolean result of and()
    assertTrue(BooleanIndexing.and(z1, Conditions.lessThanOrEqual(5.0)));
    assertTrue(BooleanIndexing.and(z1, Conditions.greaterThanOrEqual(0.0)));
}
 
Example 4
Source File: RandomTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testBinomialDistribution2() {
    Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119);
    Random random2 = Nd4j.getRandomFactory().getNewRandomInstance(119);

    INDArray z1 = Nd4j.create(DataType.FLOAT, 1000);
    INDArray z2 = Nd4j.zeros(DataType.FLOAT, 1000);
    INDArray z1Dup = Nd4j.zeros(DataType.FLOAT, 1000);

    INDArray probs = Nd4j.create(new float[] {0.25f, 0.43f, 0.55f, 0.43f, 0.25f});

    BinomialDistribution op1 = new BinomialDistribution(z1, 5, probs);
    BinomialDistribution op2 = new BinomialDistribution(z2, 5, probs);

    Nd4j.getExecutioner().exec(op1, random1);
    Nd4j.getExecutioner().exec(op2, random2);

    assertNotEquals(z1Dup, z1);

    assertEquals(z1, z2);

    // Assert the bound checks rather than discarding the boolean result of and()
    assertTrue(BooleanIndexing.and(z1, Conditions.lessThanOrEqual(5.0)));
    assertTrue(BooleanIndexing.and(z1, Conditions.greaterThanOrEqual(0.0)));
}
 
Example 5
Source File: PLNetInputOptimizer.java    From AILibs with GNU Affero General Public License v3.0
/**
 * Optimizes the given loss function with respect to a given PLNet's inputs using gradient descent,
 * ensuring that the returned (masked) inputs lie within the range [0, 1].
 * Performs gradient descent for a given number of steps starting from a given input, using a linearly decaying learning rate.
 * The inputs to be optimized are selected via a 0/1 mask vector.
 * @param plNet					PLNet whose inputs to optimize.
 * @param input					Initial inputs to start the gradient descent procedure from.
 * @param loss					The loss to be minimized.
 * @param initialLearningRate	The initial learning rate.
 * @param finalLearningRate		The value the learning rate should decay to.
 * @param numSteps				The number of gradient descent steps to perform.
 * @param inputMask				0/1 vector specifying the inputs to optimize, i.e. it should hold a 1 at the index of every input that should be optimized and a 0 elsewhere.
 * @return						The input optimized with respect to the given loss.
 */
public INDArray optimizeInput(PLNetDyadRanker plNet, INDArray input, InputOptimizerLoss loss, double initialLearningRate, double finalLearningRate, int numSteps,
		INDArray inputMask) {
	INDArray inp = input.dup();
	INDArray alphas = Nd4j.zeros(inp.shape());
	INDArray betas = Nd4j.zeros(inp.shape());
	INDArray ones = Nd4j.ones(inp.shape());
	double output = plNet.getPlNet().output(inp).getDouble(0);
	double incumbentOutput = output;
	INDArray incumbent = inp.dup();
	for (int i = 0; i < numSteps; i++) {
		// Linearly decay the learning rate from initialLearningRate to finalLearningRate
		double lrDecayTerm = (double) i / (double) numSteps;
		double learningRate = (1 - lrDecayTerm) * initialLearningRate + lrDecayTerm * finalLearningRate;
		// Gradient of the loss with respect to the PLNet's inputs
		INDArray grad = computeInputDerivative(plNet, inp, loss);
		// Add the gradient of the KKT penalty terms enforcing 0 <= inp <= 1:
		// alphas push inputs below 0 upward, betas push inputs above 1 downward
		grad.subi(alphas);
		grad.addi(betas);
		// Update the multipliers and clamp them at 0, since KKT multipliers are non-negative
		alphas.subi(inp);
		betas.addi(inp.sub(ones));
		BooleanIndexing.replaceWhere(alphas, 0.0d, Conditions.lessThan(0.0d));
		BooleanIndexing.replaceWhere(betas, 0.0d, Conditions.lessThan(0.0d));
		// Restrict the update to the masked inputs and take a descent step
		grad.muli(inputMask);
		grad.muli(learningRate);
		inp.subi(grad);

		output = plNet.getPlNet().output(inp).getDouble(0);
		if (listener != null) {
			listener.reportOptimizationStep(inp, output);
		}

		// Accept the current input as the new incumbent only if it improves the output
		// and all of its masked entries lie within [0, 1]
		INDArray incCheck = inp.dup().muli(inputMask);
		if (output > incumbentOutput && BooleanIndexing.and(incCheck, Conditions.greaterThanOrEqual(0.0d)) && BooleanIndexing.and(incCheck, Conditions.lessThanOrEqual(1.0d))) {
			incumbent = inp.dup();
			incumbentOutput = output;
		}
	}

	return incumbent;
}
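For orientation, a hypothetical call might look as follows. This is a sketch, not code from AILibs: plNet and loss stand in for whatever trained PLNetDyadRanker and InputOptimizerLoss the surrounding code provides, and we assume the optimizer can be constructed directly; the mask restricts optimization to the first two of five inputs.

// Hypothetical usage sketch; plNet and loss are assumed to exist,
// and a default constructor is assumed for PLNetInputOptimizer.
PLNetInputOptimizer optimizer = new PLNetInputOptimizer();
INDArray start = Nd4j.rand(1, 5);                          // initial inputs, drawn from [0, 1]
INDArray mask = Nd4j.create(new double[] {1, 1, 0, 0, 0}); // optimize only the first two inputs
INDArray best = optimizer.optimizeInput(plNet, start, loss, 0.1, 0.001, 100, mask);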