Package org.apache.commons.math3.random

Examples of org.apache.commons.math3.random.JDKRandomGenerator
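
JDKRandomGenerator is a thin adapter that extends java.util.Random and implements the library's RandomGenerator interface, so it can be seeded and handed to any commons-math component that needs a source of randomness (multi-start optimizers, random vector generators, ranking with random tie-breaking, and so on). A minimal stand-alone sketch; the seed and the particular draws are chosen only for illustration:

import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.commons.math3.random.RandomGenerator;

public class JDKRandomGeneratorSketch {
    public static void main(String[] args) {
        // Seed for reproducibility, then draw a few values.
        RandomGenerator g = new JDKRandomGenerator();
        g.setSeed(42);

        double uniform  = g.nextDouble();   // uniform on [0, 1)
        double gaussian = g.nextGaussian(); // standard normal draw
        int    die      = g.nextInt(6) + 1; // uniform integer in 1..6

        System.out.println(uniform + " " + gaussian + " " + die);
    }
}

The snippets below all follow the same pattern: construct the generator, seed it, then pass it to the component under test.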


    @Test(expected=TestException.class)
    public void testNoOptimum() {
        DifferentiableMultivariateVectorOptimizer underlyingOptimizer =
            new GaussNewtonOptimizer(true,
                                     new SimpleVectorValueChecker(1.0e-6, 1.0e-6));
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(12373523445L);
        RandomVectorGenerator generator =
            new UncorrelatedRandomVectorGenerator(1, new GaussianRandomGenerator(g));
        DifferentiableMultivariateVectorMultiStartOptimizer optimizer =
            new DifferentiableMultivariateVectorMultiStartOptimizer(underlyingOptimizer,
                                                                       10, generator);


        circle.addPoint( 35.0, 15.0);
        circle.addPoint( 45.0, 97.0);
        NonLinearConjugateGradientOptimizer underlying =
            new NonLinearConjugateGradientOptimizer(ConjugateGradientFormula.POLAK_RIBIERE,
                                                    new SimpleValueChecker(1.0e-10, 1.0e-10));
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(753289573253L);
        RandomVectorGenerator generator =
            new UncorrelatedRandomVectorGenerator(new double[] { 50.0, 50.0 }, new double[] { 10.0, 10.0 },
                                                  new GaussianRandomGenerator(g));
        DifferentiableMultivariateMultiStartOptimizer optimizer =
            new DifferentiableMultivariateMultiStartOptimizer(underlying, 10, generator);
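
In both optimizer snippets above, the JDKRandomGenerator never drives the optimization directly: it feeds a GaussianRandomGenerator, which an UncorrelatedRandomVectorGenerator turns into random start points for the multi-start restarts. A minimal sketch of what that generator produces, reusing the seed, mean and standard deviation from the snippet above (printing three draws is purely illustrative):

import java.util.Arrays;

import org.apache.commons.math3.random.GaussianRandomGenerator;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.commons.math3.random.RandomVectorGenerator;
import org.apache.commons.math3.random.UncorrelatedRandomVectorGenerator;

public class StartPointSketch {
    public static void main(String[] args) {
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(753289573253L);

        // Each nextVector() call yields an independent 2-D start point with
        // mean {50, 50} and standard deviation {10, 10} per coordinate.
        RandomVectorGenerator generator =
            new UncorrelatedRandomVectorGenerator(new double[] { 50.0, 50.0 },
                                                  new double[] { 10.0, 10.0 },
                                                  new GaussianRandomGenerator(g));
        for (int i = 0; i < 3; i++) {
            System.out.println(Arrays.toString(generator.nextVector()));
        }
    }
}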

     * with this error structure. Then verify that GLS estimated coefficients,
     * on average, perform better than OLS.
     */
    @Test
    public void testGLSEfficiency() throws Exception {
        RandomGenerator rg = new JDKRandomGenerator();
        rg.setSeed(200); // Seed has been selected to generate non-trivial covariance
       
        // Assume model has 16 observations (will use Longley data).  Start by generating
        // non-constant variances for the 16 error terms.
        final int nObs = 16;
        double[] sigma = new double[nObs];
        for (int i = 0; i < nObs; i++) {
            sigma[i] = 10 * rg.nextDouble();
        }
       
        // Now generate 1000 error vectors to use to estimate the covariance matrix
        // Columns are draws on N(0, sigma[col])
        final int numSeeds = 1000;
        RealMatrix errorSeeds = MatrixUtils.createRealMatrix(numSeeds, nObs);
        for (int i = 0; i < numSeeds; i++) {
            for (int j = 0; j < nObs; j++) {
                errorSeeds.setEntry(i, j, rg.nextGaussian() * sigma[j]);
            }
        }
       
        // Get covariance matrix for columns
        RealMatrix cov = (new Covariance(errorSeeds)).getCovarianceMatrix();
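
Per the comments above, the truncated remainder of this test feeds the estimated covariance matrix into a GLS fit on the Longley data and checks that it outperforms OLS on average. A self-contained sketch of that pattern, using a made-up single-regressor model rather than the Longley data (the design, true coefficients and noise below are assumptions for illustration only):

import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.stat.correlation.Covariance;
import org.apache.commons.math3.stat.regression.GLSMultipleLinearRegression;
import org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression;

public class GlsVsOlsSketch {
    public static void main(String[] args) {
        RandomGenerator rg = new JDKRandomGenerator();
        rg.setSeed(200);

        // Non-constant error variances, as in the test above.
        final int nObs = 16;
        double[] sigma = new double[nObs];
        for (int i = 0; i < nObs; i++) {
            sigma[i] = 10 * rg.nextDouble();
        }

        // Estimate the error covariance from 1000 simulated error vectors.
        final int numSeeds = 1000;
        RealMatrix errorSeeds = MatrixUtils.createRealMatrix(numSeeds, nObs);
        for (int i = 0; i < numSeeds; i++) {
            for (int j = 0; j < nObs; j++) {
                errorSeeds.setEntry(i, j, rg.nextGaussian() * sigma[j]);
            }
        }
        double[][] omega = new Covariance(errorSeeds).getCovarianceMatrix().getData();

        // Made-up single-regressor data with heteroskedastic noise (placeholder, not Longley).
        double[][] x = new double[nObs][1];
        double[] y = new double[nObs];
        for (int i = 0; i < nObs; i++) {
            x[i][0] = i + 1.0;
            y[i] = 2.0 + 3.0 * x[i][0] + rg.nextGaussian() * sigma[i];
        }

        // GLS uses the estimated covariance; OLS ignores it.
        GLSMultipleLinearRegression gls = new GLSMultipleLinearRegression();
        gls.newSampleData(y, x, omega);
        OLSMultipleLinearRegression ols = new OLSMultipleLinearRegression();
        ols.newSampleData(y, x);

        System.out.println("GLS slope: " + gls.estimateRegressionParameters()[1]);
        System.out.println("OLS slope: " + ols.estimateRegressionParameters()[1]);
    }
}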

        TestUtils.assertEquals(correctRanks, ranks, 0d);
    }

    @Test
    public void testNaNsFixedTiesRandom() {
        RandomGenerator randomGenerator = new JDKRandomGenerator();
        randomGenerator.setSeed(1000);
        NaturalRanking ranking = new NaturalRanking(NaNStrategy.FIXED,
                randomGenerator);
        double[] ranks = ranking.rank(exampleData);
        double[] correctRanks = { 5, 4, 6, 7, 3, 8, Double.NaN, 1, 4 };
        TestUtils.assertEquals(correctRanks, ranks, 0d);

    @Test
    public void testSinMin() {
        UnivariateFunction f = new SinFunction();
        UnivariateOptimizer underlying = new BrentOptimizer(1e-10, 1e-14);
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(44428400075L);
        UnivariateMultiStartOptimizer<UnivariateFunction> optimizer =
            new UnivariateMultiStartOptimizer<UnivariateFunction>(underlying, 10, g);
        optimizer.optimize(300, f, GoalType.MINIMIZE, -100.0, 100.0);
        UnivariatePointValuePair[] optima = optimizer.getOptima();
        for (int i = 1; i < optima.length; ++i) {

    public void testQuinticMin() {
        // The quintic function has zeros at 0, +-0.5 and +-1.
        // The function has extrema (first derivative is zero) at 0.27195613 and 0.82221643,
        UnivariateFunction f = new QuinticFunction();
        UnivariateOptimizer underlying = new BrentOptimizer(1e-9, 1e-14);
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(4312000053L);
        UnivariateMultiStartOptimizer<UnivariateFunction> optimizer =
            new UnivariateMultiStartOptimizer<UnivariateFunction>(underlying, 5, g);

        UnivariatePointValuePair optimum
            = optimizer.optimize(300, f, GoalType.MINIMIZE, -0.3, -0.2);

                    }
                    return 0;
                }
            };
        UnivariateOptimizer underlying = new BrentOptimizer(1e-9, 1e-14);
        JDKRandomGenerator g = new JDKRandomGenerator();
        g.setSeed(4312000053L);
        UnivariateMultiStartOptimizer<UnivariateFunction> optimizer =
            new UnivariateMultiStartOptimizer<UnivariateFunction>(underlying, 5, g);
        try {
            optimizer.optimize(300, f, GoalType.MINIMIZE, -0.3, -0.2);

        for (int i = 0; i < k; ++i) {
            sumImpl[i]     = new Sum();
            sumSqImpl[i]   = new SumOfSquares();
            minImpl[i]     = new Min();
            maxImpl[i]     = new Max();
            sumLogImpl[i]  = new SumOfLogs();
            geoMeanImpl[i] = new GeometricMean();
            meanImpl[i]    = new Mean();
        }

        covarianceImpl =

        geoMeanImpl = new StorelessUnivariateStatistic[k];
        meanImpl    = new StorelessUnivariateStatistic[k];

        for (int i = 0; i < k; ++i) {
            sumImpl[i]     = new Sum();
            sumSqImpl[i]   = new SumOfSquares();
            minImpl[i]     = new Min();
            maxImpl[i]     = new Max();
            sumLogImpl[i]  = new SumOfLogs();
            geoMeanImpl[i] = new GeometricMean();
            meanImpl[i]    = new Mean();

     * @param checker Convergence checker.
     */
    protected BaseOptimizer(ConvergenceChecker<PAIR> checker) {
        this.checker = checker;

        evaluations = new Incrementor(0, new MaxEvalCallback());
        iterations = new Incrementor(0, new MaxIterCallback());
    }
