Package org.apache.commons.math.random

Examples of org.apache.commons.math.random.JDKRandomGenerator
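
Before the longer snippets below, a minimal self-contained sketch of the JDKRandomGenerator API they all rely on (seeding plus uniform and Gaussian draws); the seed and printed values here are illustrative only:

    import org.apache.commons.math.random.JDKRandomGenerator;
    import org.apache.commons.math.random.RandomGenerator;

    public class JDKRandomGeneratorBasics {
        public static void main(String[] args) {
            RandomGenerator rg = new JDKRandomGenerator();
            rg.setSeed(1000);                     // fixed seed for reproducible sequences
            double uniform  = rg.nextDouble();    // uniform draw in [0, 1)
            double gaussian = rg.nextGaussian();  // standard normal draw
            int    roll     = rg.nextInt(6) + 1;  // uniform integer in [1, 6]
            System.out.println(uniform + " " + gaussian + " " + roll);
        }
    }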


        correctRanks = new double[] { 2.5, 2.5, 2.5, 2.5 };
        TestUtils.assertEquals(correctRanks, ranks, 0d);
    }

    public void testNaNsFixedTiesRandom() {
        RandomGenerator randomGenerator = new JDKRandomGenerator();
        randomGenerator.setSeed(1000);
        NaturalRanking ranking = new NaturalRanking(NaNStrategy.FIXED,
                randomGenerator);
        double[] ranks = ranking.rank(exampleData);
        double[] correctRanks = { 5, 4, 6, 7, 3, 8, Double.NaN, 1, 4 };
        TestUtils.assertEquals(correctRanks, ranks, 0d);
View Full Code Here
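
The test above seeds JDKRandomGenerator so that NaturalRanking's random tie-breaking is reproducible across runs. A standalone sketch of the same pattern; the input array below is made up for illustration and is not the test's exampleData:

    import java.util.Arrays;

    import org.apache.commons.math.random.JDKRandomGenerator;
    import org.apache.commons.math.random.RandomGenerator;
    import org.apache.commons.math.stat.ranking.NaNStrategy;
    import org.apache.commons.math.stat.ranking.NaturalRanking;

    public class RandomTieBreakingExample {
        public static void main(String[] args) {
            RandomGenerator randomGenerator = new JDKRandomGenerator();
            randomGenerator.setSeed(1000);  // same seed => same tie resolution on every run
            NaturalRanking ranking = new NaturalRanking(NaNStrategy.FIXED, randomGenerator);
            double[] data = { 3.0, 1.5, 3.0, Double.NaN, 2.0, 3.0 };
            double[] ranks = ranking.rank(data);  // ties among the 3.0s are broken randomly
            System.out.println(Arrays.toString(ranks));
        }
    }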


    public void testTrivial() throws FunctionEvaluationException, OptimizationException {
        LinearProblem problem =
            new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
        DifferentiableMultivariateVectorialOptimizer underlyingOptimizer =
            new GaussNewtonOptimizer(true);
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(16069223052L);
        RandomVectorGenerator generator =
            new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
        MultiStartDifferentiableMultivariateVectorialOptimizer optimizer =
            new MultiStartDifferentiableMultivariateVectorialOptimizer(underlyingOptimizer,
                                                                       10, generator);
View Full Code Here
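
Both optimizer tests in this group build the same generator chain: a seeded JDKRandomGenerator wrapped in a GaussianRandomGenerator, wrapped in an UncorrelatedRandomVectorGenerator that supplies the random start points for the multi-start search. A sketch of just that chain in isolation (the dimension is illustrative):

    import java.util.Arrays;

    import org.apache.commons.math.random.GaussianRandomGenerator;
    import org.apache.commons.math.random.JDKRandomGenerator;
    import org.apache.commons.math.random.RandomVectorGenerator;
    import org.apache.commons.math.random.UncorrelatedRandomVectorGenerator;

    public class StartPointGeneratorExample {
        public static void main(String[] args) {
            JDKRandomGenerator g = new JDKRandomGenerator();
            g.setSeed(16069223052L);  // fixed seed keeps the multi-start sequence reproducible
            RandomVectorGenerator generator =
                new UncorrelatedRandomVectorGenerator(3, new GaussianRandomGenerator(g));
            // each call yields a vector of 3 independent standard-normal components
            double[] start = generator.nextVector();
            System.out.println(Arrays.toString(start));
        }
    }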

    @Test(expected = OptimizationException.class)
    public void testNoOptimum() throws FunctionEvaluationException, OptimizationException {
        DifferentiableMultivariateVectorialOptimizer underlyingOptimizer =
            new GaussNewtonOptimizer(true);
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(12373523445L);
        RandomVectorGenerator generator =
            new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
        MultiStartDifferentiableMultivariateVectorialOptimizer optimizer =
            new MultiStartDifferentiableMultivariateVectorialOptimizer(underlyingOptimizer,
                                                                       10, generator);
View Full Code Here

     * with this error structure. Then verify that GLS estimated coefficients,
     * on average, perform better than OLS.
     */
    @Test
    public void testGLSEfficiency() throws Exception {
        RandomGenerator rg = new JDKRandomGenerator();
        rg.setSeed(200); // Seed has been selected to generate non-trivial covariance
       
        // Assume model has 16 observations (will use Longley data).  Start by generating
        // non-constant variances for the 16 error terms.
        final int nObs = 16;
        double[] sigma = new double[nObs];
        for (int i = 0; i < nObs; i++) {
            sigma[i] = 10 * rg.nextDouble();
        }
       
        // Now generate 1000 error vectors to use to estimate the covariance matrix
        // Columns are draws on N(0, sigma[col])
        final int numSeeds = 1000;
        RealMatrix errorSeeds = MatrixUtils.createRealMatrix(numSeeds, nObs);
        for (int i = 0; i < numSeeds; i++) {
            for (int j = 0; j < nObs; j++) {
                errorSeeds.setEntry(i, j, rg.nextGaussian() * sigma[j]);
            }
        }
       
        // Get covariance matrix for columns
        RealMatrix cov = (new Covariance(errorSeeds)).getCovarianceMatrix();
View Full Code Here
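
The snippet above estimates a covariance matrix from simulated error draws. A common companion step in this kind of GLS simulation is drawing correlated error vectors from a covariance matrix with CorrelatedRandomVectorGenerator; the sketch below assumes a small hand-picked 2x2 covariance rather than the test's estimated one:

    import org.apache.commons.math.linear.MatrixUtils;
    import org.apache.commons.math.linear.RealMatrix;
    import org.apache.commons.math.random.CorrelatedRandomVectorGenerator;
    import org.apache.commons.math.random.GaussianRandomGenerator;
    import org.apache.commons.math.random.JDKRandomGenerator;

    public class CorrelatedErrorsExample {
        public static void main(String[] args) throws Exception {
            JDKRandomGenerator rg = new JDKRandomGenerator();
            rg.setSeed(200);
            // a small positive-definite covariance matrix, hand-picked for illustration
            RealMatrix cov = MatrixUtils.createRealMatrix(new double[][] {
                { 4.0, 1.2 },
                { 1.2, 9.0 }
            });
            CorrelatedRandomVectorGenerator errorGenerator =
                new CorrelatedRandomVectorGenerator(new double[2], cov,
                                                    1.0e-12 * cov.getNorm(),
                                                    new GaussianRandomGenerator(rg));
            double[] errors = errorGenerator.nextVector();  // one correlated N(0, cov) draw
            System.out.println(errors[0] + " " + errors[1]);
        }
    }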

    @Test
    public void testSinMin() throws MathException {
        UnivariateRealFunction f = new SinFunction();
        UnivariateRealOptimizer underlying = new BrentOptimizer();
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(44428400075L);
        MultiStartUnivariateRealOptimizer minimizer =
            new MultiStartUnivariateRealOptimizer(underlying, 10, g);
        minimizer.optimize(f, GoalType.MINIMIZE, -100.0, 100.0);
        double[] optima = minimizer.getOptima();
        double[] optimaValues = minimizer.getOptimaValues();
View Full Code Here
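
SinFunction is a test helper class; the sketch below reproduces the same multi-start Brent setup with an inline UnivariateRealFunction so it compiles on its own (bounds, restart count, and seed copied from the snippet, everything else illustrative):

    import org.apache.commons.math.analysis.UnivariateRealFunction;
    import org.apache.commons.math.optimization.GoalType;
    import org.apache.commons.math.optimization.MultiStartUnivariateRealOptimizer;
    import org.apache.commons.math.optimization.univariate.BrentOptimizer;
    import org.apache.commons.math.random.JDKRandomGenerator;

    public class MultiStartSinExample {
        public static void main(String[] args) throws Exception {
            UnivariateRealFunction f = new UnivariateRealFunction() {
                public double value(double x) {
                    return Math.sin(x);
                }
            };
            JDKRandomGenerator g = new JDKRandomGenerator();
            g.setSeed(44428400075L);  // fixed seed => deterministic choice of start points
            MultiStartUnivariateRealOptimizer minimizer =
                new MultiStartUnivariateRealOptimizer(new BrentOptimizer(), 10, g);
            double best = minimizer.optimize(f, GoalType.MINIMIZE, -100.0, 100.0);
            // getOptima()/getOptimaValues() expose the result of each individual restart
            System.out.println("x = " + best + ", f(x) = " + Math.sin(best));
        }
    }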

        // The quintic function has zeros at 0, +-0.5 and +-1.
        // The function has extrema (first derivative is zero) at 0.27195613 and 0.82221643,
        UnivariateRealFunction f = new QuinticFunction();
        UnivariateRealOptimizer underlying = new BrentOptimizer();
        underlying.setRelativeAccuracy(1e-15);
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(4312000053L);
        MultiStartUnivariateRealOptimizer minimizer =
            new MultiStartUnivariateRealOptimizer(underlying, 5, g);
        minimizer.setAbsoluteAccuracy(10 * minimizer.getAbsoluteAccuracy());
        minimizer.setRelativeAccuracy(10 * minimizer.getRelativeAccuracy());

 
View Full Code Here

    Rosenbrock rosenbrock = new Rosenbrock();
    NelderMead underlying = new NelderMead();
    underlying.setStartConfiguration(new double[][] {
                                         { -1.2, 1.0 }, { 0.9, 1.2 }, { 3.5, -2.3 }
                                     });
    JDKRandomGenerator g = new JDKRandomGenerator();
    g.setSeed(16069223052L);
    RandomVectorGenerator generator =
        new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g));
    MultiStartMultivariateRealOptimizer optimizer =
        new MultiStartMultivariateRealOptimizer(underlying, 10, generator);
    optimizer.setConvergenceChecker(new SimpleScalarValueChecker(-1, 1.0e-3));
View Full Code Here
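
Rosenbrock is likewise a test helper. A self-contained sketch of the same multi-start Nelder-Mead setup with the Rosenbrock function written inline (the start point passed to optimize() is illustrative):

    import java.util.Arrays;

    import org.apache.commons.math.analysis.MultivariateRealFunction;
    import org.apache.commons.math.optimization.GoalType;
    import org.apache.commons.math.optimization.MultiStartMultivariateRealOptimizer;
    import org.apache.commons.math.optimization.RealPointValuePair;
    import org.apache.commons.math.optimization.SimpleScalarValueChecker;
    import org.apache.commons.math.optimization.direct.NelderMead;
    import org.apache.commons.math.random.GaussianRandomGenerator;
    import org.apache.commons.math.random.JDKRandomGenerator;
    import org.apache.commons.math.random.UncorrelatedRandomVectorGenerator;

    public class MultiStartRosenbrockExample {
        public static void main(String[] args) throws Exception {
            MultivariateRealFunction rosenbrock = new MultivariateRealFunction() {
                public double value(double[] x) {
                    double a = x[1] - x[0] * x[0];
                    double b = 1.0 - x[0];
                    return 100.0 * a * a + b * b;  // classic Rosenbrock valley, minimum at (1, 1)
                }
            };
            JDKRandomGenerator g = new JDKRandomGenerator();
            g.setSeed(16069223052L);
            MultiStartMultivariateRealOptimizer optimizer =
                new MultiStartMultivariateRealOptimizer(new NelderMead(), 10,
                    new UncorrelatedRandomVectorGenerator(2, new GaussianRandomGenerator(g)));
            optimizer.setConvergenceChecker(new SimpleScalarValueChecker(-1, 1.0e-3));
            RealPointValuePair best =
                optimizer.optimize(rosenbrock, GoalType.MINIMIZE, new double[] { -1.2, 1.0 });
            System.out.println("minimum near " + Arrays.toString(best.getPoint()));
        }
    }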
