org.apache.commons.math3.random.GaussianRandomGenerator Java Examples

The following examples show how to use org.apache.commons.math3.random.GaussianRandomGenerator. The class wraps any RandomGenerator and exposes nextNormalizedDouble(), which returns normalized Gaussian deviates (mean 0, standard deviation 1); it is most often combined with UncorrelatedRandomVectorGenerator to produce random start points for Commons Math optimizers. The examples below are taken from open-source projects; the source file and license are listed above each one.
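As a minimal orientation before the project examples, the sketch below shows the pattern most of them share: seed a RandomGenerator, wrap it in GaussianRandomGenerator, and optionally hand it to UncorrelatedRandomVectorGenerator to draw independent Gaussian vectors. The class name and the seed value 42 are arbitrary choices for this illustration; the Commons Math calls are the standard API.

import org.apache.commons.math3.random.GaussianRandomGenerator;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.commons.math3.random.UncorrelatedRandomVectorGenerator;

public class GaussianRandomGeneratorBasics {
    public static void main(String[] args) {
        // Seed the underlying RandomGenerator so the run is reproducible.
        JDKRandomGenerator rng = new JDKRandomGenerator();
        rng.setSeed(42L);

        // nextNormalizedDouble() draws from N(0, 1).
        GaussianRandomGenerator gaussian = new GaussianRandomGenerator(rng);
        double scalar = gaussian.nextNormalizedDouble();

        // Draw 3-component vectors whose coordinates are independent N(0, 1) deviates.
        UncorrelatedRandomVectorGenerator vectors =
            new UncorrelatedRandomVectorGenerator(3, gaussian);
        double[] vector = vectors.nextVector();

        System.out.println(scalar + " / " + java.util.Arrays.toString(vector));
    }
}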
Example #1
Source File: MultiStartMultivariateVectorOptimizerTest.java    From astor with GNU General Public License v2.0
/**
 * Test demonstrating that the user exception is finally thrown if none
 * of the runs succeed.
 */
@Test(expected=TestException.class)
public void testNoOptimum() {
    JacobianMultivariateVectorOptimizer underlyingOptimizer
        = new GaussNewtonOptimizer(true, new SimpleVectorValueChecker(1e-6, 1e-6));
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(12373523445L);
    RandomVectorGenerator generator
        = new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
    MultiStartMultivariateVectorOptimizer optimizer
        = new MultiStartMultivariateVectorOptimizer(underlyingOptimizer, 10, generator);
    optimizer.optimize(new MaxEval(100),
                       new Target(new double[] { 0 }),
                       new Weight(new double[] { 1 }),
                       new InitialGuess(new double[] { 0 }),
                       new ModelFunction(new MultivariateVectorFunction() {
                               public double[] value(double[] point) {
                                   throw new TestException();
                               }
                           }));
}
 
Example #2
Source File: MultivariateMultiStartOptimizerTest.java    From astor with GNU General Public License v2.0
@Test
public void testRosenbrock() {
    Rosenbrock rosenbrock = new Rosenbrock();
    SimplexOptimizer underlying
        = new SimplexOptimizer(new SimpleValueChecker(-1, 1.0e-3));
    NelderMeadSimplex simplex = new NelderMeadSimplex(new double[][] {
            { -1.2,  1.0 }, { 0.9, 1.2 } , {  3.5, -2.3 }
        });
    underlying.setSimplex(simplex);
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(16069223052L);
    RandomVectorGenerator generator =
        new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultivariateMultiStartOptimizer optimizer =
        new MultivariateMultiStartOptimizer(underlying, 10, generator);
    PointValuePair optimum =
        optimizer.optimize(1100, rosenbrock, GoalType.MINIMIZE, new double[] { -1.2, 1.0 });

    Assert.assertEquals(rosenbrock.getCount(), optimizer.getEvaluations());
    Assert.assertTrue(optimizer.getEvaluations() > 900);
    Assert.assertTrue(optimizer.getEvaluations() < 1200);
    Assert.assertTrue(optimum.getValue() < 8.0e-4);
}
 
Example #3
Source File: DifferentiableMultivariateVectorMultiStartOptimizerTest.java    From astor with GNU General Public License v2.0
@Test(expected=TestException.class)
public void testNoOptimum() {
    DifferentiableMultivariateVectorOptimizer underlyingOptimizer =
        new GaussNewtonOptimizer(true,
                                 new SimpleVectorValueChecker(1.0e-6, 1.0e-6));
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(12373523445L);
    RandomVectorGenerator generator =
        new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
    DifferentiableMultivariateVectorMultiStartOptimizer optimizer =
        new DifferentiableMultivariateVectorMultiStartOptimizer(underlyingOptimizer,
                                                                   10, generator);
    optimizer.optimize(100, new DifferentiableMultivariateVectorFunction() {
            public MultivariateMatrixFunction jacobian() {
                return null;
            }
            public double[] value(double[] point) {
                throw new TestException();
            }
        }, new double[] { 2 }, new double[] { 1 }, new double[] { 0 });
}
 
Example #4
Source File: MultiStartMultivariateVectorOptimizerTest.java    From astor with GNU General Public License v2.0
@Test(expected=NullPointerException.class)
public void testGetOptimaBeforeOptimize() {

    JacobianMultivariateVectorOptimizer underlyingOptimizer
        = new GaussNewtonOptimizer(true, new SimpleVectorValueChecker(1e-6, 1e-6));
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(16069223052L);
    RandomVectorGenerator generator
        = new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
    MultiStartMultivariateVectorOptimizer optimizer
        = new MultiStartMultivariateVectorOptimizer(underlyingOptimizer, 10, generator);

    optimizer.getOptima();
}
 
Example #5
Source File: MultiStartMultivariateOptimizerTest.java    From astor with GNU General Public License v2.0
@Test
public void testRosenbrock() {
    Rosenbrock rosenbrock = new Rosenbrock();
    SimplexOptimizer underlying
        = new SimplexOptimizer(new SimpleValueChecker(-1, 1e-3));
    NelderMeadSimplex simplex = new NelderMeadSimplex(new double[][] {
            { -1.2,  1.0 },
            { 0.9, 1.2 } ,
            {  3.5, -2.3 }
        });
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(16069223052L);
    RandomVectorGenerator generator
        = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer
        = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    PointValuePair optimum
        = optimizer.optimize(new MaxEval(1100),
                             new ObjectiveFunction(rosenbrock),
                             GoalType.MINIMIZE,
                             simplex,
                             new InitialGuess(new double[] { -1.2, 1.0 }));

    Assert.assertEquals(rosenbrock.getCount(), optimizer.getEvaluations());
    Assert.assertTrue(optimizer.getEvaluations() > 900);
    Assert.assertTrue(optimizer.getEvaluations() < 1200);
    Assert.assertTrue(optimum.getValue() < 8e-4);
}
 
Example #6
Source File: MultiStartMultivariateVectorOptimizerTest.java    From astor with GNU General Public License v2.0
@Test
public void testTrivial() {
    LinearProblem problem
        = new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
    JacobianMultivariateVectorOptimizer underlyingOptimizer
        = new GaussNewtonOptimizer(true, new SimpleVectorValueChecker(1e-6, 1e-6));
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(16069223052L);
    RandomVectorGenerator generator
        = new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
    MultiStartMultivariateVectorOptimizer optimizer
        = new MultiStartMultivariateVectorOptimizer(underlyingOptimizer, 10, generator);

    PointVectorValuePair optimum
        = optimizer.optimize(new MaxEval(100),
                             problem.getModelFunction(),
                             problem.getModelFunctionJacobian(),
                             problem.getTarget(),
                             new Weight(new double[] { 1 }),
                             new InitialGuess(new double[] { 0 }));
    Assert.assertEquals(1.5, optimum.getPoint()[0], 1e-10);
    Assert.assertEquals(3.0, optimum.getValue()[0], 1e-10);
    PointVectorValuePair[] optima = optimizer.getOptima();
    Assert.assertEquals(10, optima.length);
    for (int i = 0; i < optima.length; i++) {
        Assert.assertEquals(1.5, optima[i].getPoint()[0], 1e-10);
        Assert.assertEquals(3.0, optima[i].getValue()[0], 1e-10);
    }
    Assert.assertTrue(optimizer.getEvaluations() > 20);
    Assert.assertTrue(optimizer.getEvaluations() < 50);
    Assert.assertEquals(100, optimizer.getMaxEvaluations());
}
 
Example #7
Source File: MultiStartMultivariateVectorOptimizerTest.java    From astor with GNU General Public License v2.0
@Test
public void testIssue914() {
    LinearProblem problem = new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
    JacobianMultivariateVectorOptimizer underlyingOptimizer =
            new GaussNewtonOptimizer(true, new SimpleVectorValueChecker(1e-6, 1e-6)) {
        @Override
        public PointVectorValuePair optimize(OptimizationData... optData) {
            // filter out simple bounds, as they are not supported
            // by the underlying optimizer, and we don't really care for this test
            OptimizationData[] filtered = optData.clone();
            for (int i = 0; i < filtered.length; ++i) {
                if (filtered[i] instanceof SimpleBounds) {
                    filtered[i] = null;
                }
            }
            return super.optimize(filtered);
        }
    };
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(16069223052L);
    RandomVectorGenerator generator =
            new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
    MultiStartMultivariateVectorOptimizer optimizer =
            new MultiStartMultivariateVectorOptimizer(underlyingOptimizer, 10, generator);

    optimizer.optimize(new MaxEval(100),
                       problem.getModelFunction(),
                       problem.getModelFunctionJacobian(),
                       problem.getTarget(),
                       new Weight(new double[] { 1 }),
                       new InitialGuess(new double[] { 0 }),
                       new SimpleBounds(new double[] { -1.0e-10 }, new double[] {  1.0e-10 }));
    PointVectorValuePair[] optima = optimizer.getOptima();
    // only the first start should have succeeded
    Assert.assertEquals(1, optima.length);

}
 
Example #8
Source File: MultiStartMultivariateOptimizerTest.java    From astor with GNU General Public License v2.0
@Test
public void testRosenbrock() {
    Rosenbrock rosenbrock = new Rosenbrock();
    SimplexOptimizer underlying
        = new SimplexOptimizer(new SimpleValueChecker(-1, 1e-3));
    NelderMeadSimplex simplex = new NelderMeadSimplex(new double[][] {
            { -1.2,  1.0 },
            { 0.9, 1.2 } ,
            {  3.5, -2.3 }
        });
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(16069223052L);
    RandomVectorGenerator generator
        = new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    int nbStarts = 10;
    MultiStartMultivariateOptimizer optimizer
        = new MultiStartMultivariateOptimizer(underlying, nbStarts, generator);
    PointValuePair optimum
        = optimizer.optimize(new MaxEval(1100),
                             new ObjectiveFunction(rosenbrock),
                             GoalType.MINIMIZE,
                             simplex,
                             new InitialGuess(new double[] { -1.2, 1.0 }));
    Assert.assertEquals(nbStarts, optimizer.getOptima().length);

    Assert.assertEquals(rosenbrock.getCount(), optimizer.getEvaluations());
    Assert.assertTrue(optimizer.getEvaluations() > 900);
    Assert.assertTrue(optimizer.getEvaluations() < 1200);
    Assert.assertTrue(optimum.getValue() < 5e-5);
}
 
Example #9
Source File: GaussianFileListGenerator.java    From hbase with Apache License 2.0
@Override
public Iterator<List<HStoreFile>> iterator() {
  return new Iterator<List<HStoreFile>>() {
    private GaussianRandomGenerator gen =
        new GaussianRandomGenerator(new MersenneTwister(random.nextInt()));
    private int count = 0;

    @Override
    public boolean hasNext() {
      return count < MAX_FILE_GEN_ITERS;
    }

    @Override
    public List<HStoreFile> next() {
      count += 1;
      ArrayList<HStoreFile> files = new ArrayList<>(NUM_FILES_GEN);
      for (int i = 0; i < NUM_FILES_GEN; i++) {
        files.add(createMockStoreFile(
            (int) Math.ceil(Math.max(0, gen.nextNormalizedDouble() * 32 + 32)))
        );
      }

      return files;
    }

    @Override
    public void remove() {
    }
  };
}
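The HBase generator above scales the normalized deviate by hand to get file sizes with mean 32 and standard deviation 32, clamped at zero. When fixed per-component means and standard deviations are all that is needed, the mean/standard-deviation constructor of UncorrelatedRandomVectorGenerator performs the same scaling. The sketch below shows both forms side by side; the class name and the seed 42 are arbitrary for this illustration, while the value 32 is taken from the example.

import org.apache.commons.math3.random.GaussianRandomGenerator;
import org.apache.commons.math3.random.MersenneTwister;
import org.apache.commons.math3.random.UncorrelatedRandomVectorGenerator;

public class ScaledGaussianSketch {
    public static void main(String[] args) {
        GaussianRandomGenerator gen = new GaussianRandomGenerator(new MersenneTwister(42));

        // Manual scaling, as in the HBase example: mean 32, standard deviation 32, clamped at 0.
        double byHand = Math.max(0, gen.nextNormalizedDouble() * 32 + 32);

        // Equivalent scaling through the mean/standard-deviation constructor
        // of UncorrelatedRandomVectorGenerator (a single component here).
        UncorrelatedRandomVectorGenerator scaled =
            new UncorrelatedRandomVectorGenerator(new double[] { 32 }, new double[] { 32 }, gen);
        double viaGenerator = Math.max(0, scaled.nextVector()[0]);

        System.out.println(byHand + " / " + viaGenerator);
    }
}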
 
Example #10
Source File: MultiStartMultivariateOptimizerTest.java    From astor with GNU General Public License v2.0
@Test
public void testCircleFitting() {
    CircleScalar circle = new CircleScalar();
    circle.addPoint( 30.0,  68.0);
    circle.addPoint( 50.0,  -6.0);
    circle.addPoint(110.0, -20.0);
    circle.addPoint( 35.0,  15.0);
    circle.addPoint( 45.0,  97.0);
    // TODO: the wrapper around NonLinearConjugateGradientOptimizer is a temporary hack for
    // version 3.1 of the library. It should be removed when NonLinearConjugateGradientOptimizer
    // will officially be declared as implementing MultivariateDifferentiableOptimizer
    GradientMultivariateOptimizer underlying
        = new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
                                                  new SimpleValueChecker(1e-10, 1e-10));
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(753289573253L);
    RandomVectorGenerator generator
        = new UncorrelatedRandomVectorGenerator(new double[] { 50, 50 },
                                                new double[] { 10, 10 },
                                                new GaussianRandomGenerator(g));
    MultiStartMultivariateOptimizer optimizer
        = new MultiStartMultivariateOptimizer(underlying, 10, generator);
    PointValuePair optimum
        = optimizer.optimize(new MaxEval(200),
                             circle.getObjectiveFunction(),
                             circle.getObjectiveFunctionGradient(),
                             GoalType.MINIMIZE,
                             new InitialGuess(new double[] { 98.680, 47.345 }));
    Assert.assertEquals(200, optimizer.getMaxEvaluations());
    PointValuePair[] optima = optimizer.getOptima();
    for (PointValuePair o : optima) {
        Vector2D center = new Vector2D(o.getPointRef()[0], o.getPointRef()[1]);
        Assert.assertEquals(69.960161753, circle.getRadius(center), 1e-8);
        Assert.assertEquals(96.075902096, center.getX(), 1e-8);
        Assert.assertEquals(48.135167894, center.getY(), 1e-8);
    }
    Assert.assertTrue(optimizer.getEvaluations() > 70);
    Assert.assertTrue(optimizer.getEvaluations() < 90);
    Assert.assertEquals(3.1267527, optimum.getValue(), 1e-8);
}
 
Example #11
Source File: OnlineStatisticsProviderTest.java    From metron with Apache License 2.0
@Test
public void testNormallyDistributedRandomData() {
  List<Double> values = new ArrayList<>();
  GaussianRandomGenerator gaussian = new GaussianRandomGenerator(new MersenneTwister(0L));
  for(int i = 0;i < 1000000;++i) {
    double d = gaussian.nextNormalizedDouble();
    values.add(d);
  }
  validateEquality(values);
}
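The Metron test above checks the sample statistics through its own validateEquality helper. A self-contained version of the same sanity check can be written with Commons Math's SummaryStatistics: for one million normalized deviates the sample mean should be near 0 and the sample standard deviation near 1. The class name is arbitrary for this sketch.

import org.apache.commons.math3.random.GaussianRandomGenerator;
import org.apache.commons.math3.random.MersenneTwister;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class GaussianSanityCheck {
    public static void main(String[] args) {
        GaussianRandomGenerator gaussian = new GaussianRandomGenerator(new MersenneTwister(0L));
        SummaryStatistics stats = new SummaryStatistics();
        for (int i = 0; i < 1_000_000; i++) {
            stats.addValue(gaussian.nextNormalizedDouble());
        }
        // Empirical mean should be close to 0 and standard deviation close to 1.
        System.out.printf("mean=%.4f sd=%.4f%n", stats.getMean(), stats.getStandardDeviation());
    }
}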
 
Example #12
Source File: DifferentiableMultivariateVectorMultiStartOptimizerTest.java    From astor with GNU General Public License v2.0
@Test
public void testTrivial() {
    LinearProblem problem =
        new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
    DifferentiableMultivariateVectorOptimizer underlyingOptimizer =
        new GaussNewtonOptimizer(true,
                                 new SimpleVectorValueChecker(1.0e-6, 1.0e-6));
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(16069223052L);
    RandomVectorGenerator generator =
        new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
    DifferentiableMultivariateVectorMultiStartOptimizer optimizer =
        new DifferentiableMultivariateVectorMultiStartOptimizer(underlyingOptimizer,
                                                                   10, generator);

    // no optima before first optimization attempt
    try {
        optimizer.getOptima();
        Assert.fail("an exception should have been thrown");
    } catch (MathIllegalStateException ise) {
        // expected
    }
    PointVectorValuePair optimum =
        optimizer.optimize(100, problem, problem.target, new double[] { 1 }, new double[] { 0 });
    Assert.assertEquals(1.5, optimum.getPoint()[0], 1.0e-10);
    Assert.assertEquals(3.0, optimum.getValue()[0], 1.0e-10);
    PointVectorValuePair[] optima = optimizer.getOptima();
    Assert.assertEquals(10, optima.length);
    for (int i = 0; i < optima.length; ++i) {
        Assert.assertEquals(1.5, optima[i].getPoint()[0], 1.0e-10);
        Assert.assertEquals(3.0, optima[i].getValue()[0], 1.0e-10);
    }
    Assert.assertTrue(optimizer.getEvaluations() > 20);
    Assert.assertTrue(optimizer.getEvaluations() < 50);
    Assert.assertEquals(100, optimizer.getMaxEvaluations());
}
 
Example #13
Source File: DifferentiableMultivariateMultiStartOptimizerTest.java    From astor with GNU General Public License v2.0
@Test
public void testCircleFitting() {
    Circle circle = new Circle();
    circle.addPoint( 30.0,  68.0);
    circle.addPoint( 50.0,  -6.0);
    circle.addPoint(110.0, -20.0);
    circle.addPoint( 35.0,  15.0);
    circle.addPoint( 45.0,  97.0);
    NonLinearConjugateGradientOptimizer underlying =
        new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
                                                new SimpleValueChecker(1.0e-10, 1.0e-10));
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(753289573253L);
    RandomVectorGenerator generator =
        new UncorrelatedRandomVectorGenerator(new double[] { 50.0, 50.0 }, new double[] { 10.0, 10.0 },
                                              new GaussianRandomGenerator(g));
    DifferentiableMultivariateMultiStartOptimizer optimizer =
        new DifferentiableMultivariateMultiStartOptimizer(underlying, 10, generator);
    PointValuePair optimum =
        optimizer.optimize(200, circle, GoalType.MINIMIZE, new double[] { 98.680, 47.345 });
    Assert.assertEquals(200, optimizer.getMaxEvaluations());
    PointValuePair[] optima = optimizer.getOptima();
    for (PointValuePair o : optima) {
        Point2D.Double center = new Point2D.Double(o.getPointRef()[0], o.getPointRef()[1]);
        Assert.assertEquals(69.960161753, circle.getRadius(center), 1.0e-8);
        Assert.assertEquals(96.075902096, center.x, 1.0e-8);
        Assert.assertEquals(48.135167894, center.y, 1.0e-8);
    }
    Assert.assertTrue(optimizer.getEvaluations() > 70);
    Assert.assertTrue(optimizer.getEvaluations() < 90);
    Assert.assertEquals(3.1267527, optimum.getValue(), 1.0e-8);
}