Package org.apache.commons.math3.linear

Examples of org.apache.commons.math3.linear.RealVector


        }

        /** {@inheritDoc} */
        public Evaluation evaluate(final RealVector point) {
            // Copy so optimizer can change point without changing our instance.
            final RealVector p = paramValidator == null ?
                point.copy() :
                paramValidator.validate(point.copy());

            if (lazyEvaluation) {
                return new LazyUnweightedEvaluation((ValueAndJacobianFunction) model,
View Full Code Here
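The evaluation above copies the incoming point (optionally passing it through a parameter validator) so that later changes made by the optimizer cannot alter the stored evaluation. A minimal, self-contained sketch of that copy-on-evaluate behaviour; the class name and values below are illustrative only, not from the snippet's source:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class CopySemanticsDemo {
    public static void main(String[] args) {
        RealVector point = new ArrayRealVector(new double[] { 1.0, 2.0, 3.0 });

        // copy() returns a deep copy: mutating it leaves the original untouched.
        RealVector p = point.copy();
        p.setEntry(0, 99.0);

        System.out.println("original: " + point); // unchanged: first entry is still 1
        System.out.println("copy:     " + p);     // modified: first entry is 99
    }
}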


    /** {@inheritDoc} */
    public RealVector getSigma(double covarianceSingularityThreshold) {
        final RealMatrix cov = this.getCovariances(covarianceSingularityThreshold);
        final int nC = cov.getColumnDimension();
        final RealVector sig = new ArrayRealVector(nC);
        for (int i = 0; i < nC; ++i) {
            sig.setEntry(i, FastMath.sqrt(cov.getEntry(i,i)));
        }
        return sig;
    }
View Full Code Here
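getSigma simply takes the square root of each diagonal entry of the covariance matrix. A self-contained sketch of the same computation on a hypothetical 2x2 covariance matrix:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;
import org.apache.commons.math3.util.FastMath;

public class SigmaFromCovariance {
    public static void main(String[] args) {
        // Hypothetical 2x2 covariance matrix.
        RealMatrix cov = MatrixUtils.createRealMatrix(new double[][] {
            { 4.0, 0.5 },
            { 0.5, 9.0 }
        });

        // Standard deviations are the square roots of the diagonal entries.
        int nC = cov.getColumnDimension();
        RealVector sig = new ArrayRealVector(nC);
        for (int i = 0; i < nC; i++) {
            sig.setEntry(i, FastMath.sqrt(cov.getEntry(i, i)));
        }
        System.out.println(sig); // standard deviations 2 and 3
    }
}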

        // Computation will be useless without a checker (see the iteration loop below).
        if (checker == null) {
            throw new NullArgumentException();
        }

        RealVector currentPoint = lsp.getStart();

        // iterate until convergence is reached
        Evaluation current = null;
        while (true) {
            iterationCounter.incrementCount();

            // evaluate the objective function and its jacobian
            Evaluation previous = current;
            // Value of the objective function at "currentPoint".
            evaluationCounter.incrementCount();
            current = lsp.evaluate(currentPoint);
            final RealVector currentResiduals = current.getResiduals();
            final RealMatrix weightedJacobian = current.getJacobian();
            currentPoint = current.getPoint();

            // Check convergence.
            if (previous != null) {
                if (checker.converged(iterationCounter.getCount(), previous, current)) {
                    return new OptimumImpl(
                            current,
                            evaluationCounter.getCount(),
                            iterationCounter.getCount());
                }
            }

            // solve the linearized least squares problem
            final RealVector dX = this.decomposition.solve(weightedJacobian, currentResiduals);
            // update the estimated parameters
            currentPoint = currentPoint.add(dX);
        }
    }
View Full Code Here
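Each pass of the loop solves the linearized least-squares system J * dX ~= r and adds the correction to the current point. A minimal sketch of one such update on hypothetical data, here using a QR decomposition (the snippet itself delegates to a configurable decomposition):

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.QRDecomposition;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class LinearizedStep {
    public static void main(String[] args) {
        // Hypothetical weighted Jacobian (3 residuals, 2 parameters) and residual vector.
        RealMatrix jacobian = MatrixUtils.createRealMatrix(new double[][] {
            { 1.0, 1.0 },
            { 1.0, 2.0 },
            { 1.0, 3.0 }
        });
        RealVector residuals = new ArrayRealVector(new double[] { 0.1, -0.2, 0.05 });
        RealVector currentPoint = new ArrayRealVector(new double[] { 1.0, 1.0 });

        // Solve the linearized least-squares problem J * dX ~= r.
        RealVector dX = new QRDecomposition(jacobian).getSolver().solve(residuals);

        // Update the estimated parameters, as in the loop above.
        currentPoint = currentPoint.add(dX);
        System.out.println(currentPoint);
    }
}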

        //since the normal matrix is symmetric, we only need to compute half of it.
        final int nR = jacobian.getRowDimension();
        final int nC = jacobian.getColumnDimension();
        //allocate space for return values
        final RealMatrix normal = MatrixUtils.createRealMatrix(nC, nC);
        final RealVector jTr = new ArrayRealVector(nC);
        //for each measurement
        for (int i = 0; i < nR; ++i) {
            //compute JTr for measurement i
            for (int j = 0; j < nC; j++) {
                jTr.setEntry(j, jTr.getEntry(j) +
                        residuals.getEntry(i) * jacobian.getEntry(i, j));
            }

            // add the contribution to the normal matrix for measurement i
            for (int k = 0; k < nC; ++k) {
View Full Code Here
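The nested loops accumulate the normal matrix J^T * J and the vector J^T * r one measurement at a time. For small problems the same quantities can be obtained directly with matrix operations, as in this sketch on hypothetical data:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class NormalEquationsCheck {
    public static void main(String[] args) {
        // Hypothetical Jacobian (3 measurements, 2 parameters) and residuals.
        RealMatrix jacobian = MatrixUtils.createRealMatrix(new double[][] {
            { 1.0, 2.0 },
            { 3.0, 4.0 },
            { 5.0, 6.0 }
        });
        RealVector residuals = new ArrayRealVector(new double[] { 1.0, 0.0, -1.0 });

        // The loops above accumulate exactly these two quantities:
        RealMatrix normal = jacobian.transpose().multiply(jacobian); // J^T * J
        RealVector jTr = jacobian.transpose().operate(residuals);    // J^T * r

        System.out.println(normal);
        System.out.println(jTr);
    }
}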

        // invert S
        RealMatrix invertedS = MatrixUtils.inverse(s);

        // Inn = z(k) - H * xHat(k)-
        RealVector innovation = z.subtract(measurementMatrix.operate(stateEstimation));

        // calculate gain matrix
        // K(k) = P(k)- * H' * (H * P(k)- * H' + R)^-1
        // K(k) = P(k)- * H' * S^-1
        RealMatrix kalmanGain = errorCovariance.multiply(measurementMatrixT).multiply(invertedS);
View Full Code Here
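The correction step forms the innovation z - H * xHat and the gain K = P * H' * S^-1. A self-contained sketch of that computation with hypothetical 1-measurement / 2-state matrices:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class KalmanCorrectionSketch {
    public static void main(String[] args) {
        RealMatrix h = MatrixUtils.createRealMatrix(new double[][] {{ 1.0, 0.0 }});                 // measurement matrix
        RealMatrix p = MatrixUtils.createRealMatrix(new double[][] {{ 2.0, 0.0 }, { 0.0, 2.0 }});    // error covariance
        RealMatrix r = MatrixUtils.createRealMatrix(new double[][] {{ 0.5 }});                       // measurement noise
        RealVector xHat = new ArrayRealVector(new double[] { 1.0, 0.0 });                            // state estimate
        RealVector z = new ArrayRealVector(new double[] { 1.2 });                                    // measurement

        // Innovation: z - H * xHat
        RealVector innovation = z.subtract(h.operate(xHat));

        // S = H * P * H' + R, then K = P * H' * S^-1
        RealMatrix s = h.multiply(p).multiply(h.transpose()).add(r);
        RealMatrix kalmanGain = p.multiply(h.transpose()).multiply(MatrixUtils.inverse(s));

        // Corrected state: xHat + K * innovation
        RealVector corrected = xHat.add(kalmanGain.operate(innovation));
        System.out.println(corrected);
    }
}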

        if (getNumObjectiveFunctions() == 2) {
            matrix.setEntry(0, 0, -1);
        }
        int zIndex = (getNumObjectiveFunctions() == 1) ? 0 : 1;
        matrix.setEntry(zIndex, zIndex, maximize ? 1 : -1);
        RealVector objectiveCoefficients =
            maximize ? f.getCoefficients().mapMultiply(-1) : f.getCoefficients();
        copyArray(objectiveCoefficients.toArray(), matrix.getDataRef()[zIndex]);
        matrix.setEntry(zIndex, width - 1,
            maximize ? f.getConstantTerm() : -1 * f.getConstantTerm());

        if (!restrictToNonNegative) {
            matrix.setEntry(zIndex, getSlackVariableOffset() - 1,
View Full Code Here
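When maximizing, the objective coefficients are negated (mapMultiply(-1)) before being copied into the z-row of the tableau. A minimal sketch of that negate-and-copy step on a hypothetical coefficient vector, without the surrounding SimplexTableau bookkeeping:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class ObjectiveRowSketch {
    public static void main(String[] args) {
        boolean maximize = true;
        RealVector coefficients = new ArrayRealVector(new double[] { 2.0, 3.0, 1.0 });

        // When maximizing, the coefficients are negated before entering the tableau.
        RealVector objectiveCoefficients =
            maximize ? coefficients.mapMultiply(-1) : coefficients;

        // Copy the (possibly negated) coefficients into the z-row of a small tableau.
        RealMatrix tableau = MatrixUtils.createRealMatrix(3, 4);
        int zIndex = 0;
        for (int j = 0; j < objectiveCoefficients.getDimension(); j++) {
            tableau.setEntry(zIndex, j, objectiveCoefficients.getEntry(j));
        }
        System.out.println(tableau.getRowVector(zIndex));
    }
}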

                    }
                    hs.setEntry(i, hs.getEntry(i) + modelSecondDerivativesValues.getEntry(ih) * s.getEntry(j));
                    ih++;
                }
            }
            final RealVector tmp = interpolationPoints.operate(s).ebeMultiply(modelSecondDerivativesParameters);
            for (int k = 0; k < npt; k++) {
                if (modelSecondDerivativesParameters.getEntry(k) != ZERO) {
                    for (int i = 0; i < n; i++) {
                        hs.setEntry(i, hs.getEntry(i) + tmp.getEntry(k) * interpolationPoints.getEntry(k, i));
                    }
                }
            }
            if (crvmin != ZERO) {
                state = 50; break;
View Full Code Here
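The "tmp" line above chains a matrix-vector product with an element-wise (Hadamard) product: tmp = (interpolationPoints * s) componentwise-multiplied by modelSecondDerivativesParameters. A small self-contained sketch of that combination on hypothetical data:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class OperateEbeMultiplySketch {
    public static void main(String[] args) {
        // Hypothetical interpolation points (3 points in 2 dimensions) and step s.
        RealMatrix interpolationPoints = MatrixUtils.createRealMatrix(new double[][] {
            { 1.0, 0.0 },
            { 0.0, 1.0 },
            { 1.0, 1.0 }
        });
        RealVector s = new ArrayRealVector(new double[] { 0.5, -0.5 });
        RealVector modelSecondDerivativesParameters = new ArrayRealVector(new double[] { 2.0, 0.0, 1.0 });

        // Matrix-vector product followed by an element-wise product,
        // as in the "tmp" line of the snippet above.
        RealVector tmp = interpolationPoints.operate(s).ebeMultiply(modelSecondDerivativesParameters);
        System.out.println(tmp); // entries: 1, 0, 0
    }
}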


        for (int i = 0; i < numParams; i++) {
            paramsFoundByDirectSolution[i] = new SummaryStatistics();
            sigmaEstimate[i] = new SummaryStatistics();
        }

        final RealVector init = new ArrayRealVector(new double[]{ slope, offset }, false);

        // Monte-Carlo (generates many sets of observations).
        final int mcRepeat = MONTE_CARLO_RUNS;
        int mcCount = 0;
        while (mcCount < mcRepeat) {
            // Observations.
            final Point2D.Double[] obs = lineGenerator.generate(numObs);

            final StraightLineProblem problem = new StraightLineProblem(yError);
            for (int i = 0; i < numObs; i++) {
                final Point2D.Double p = obs[i];
                problem.addPoint(p.x, p.y);
            }

            // Direct solution (using simple regression).
            final double[] regress = problem.solve();

            // Estimation of the standard deviation (diagonal elements of the
            // covariance matrix).
            final LeastSquaresProblem lsp = builder(problem).build();

            final RealVector sigma = lsp.evaluate(init).getSigma(1e-14);

            // Accumulate statistics.
            for (int i = 0; i < numParams; i++) {
                paramsFoundByDirectSolution[i].addValue(regress[i]);
                sigmaEstimate[i].addValue(sigma.getEntry(i));
            }

            // Next Monte-Carlo.
            ++mcCount;
        }
View Full Code Here
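Each Monte-Carlo run feeds the per-parameter sigma entries into SummaryStatistics accumulators. A minimal sketch of that accumulation pattern with hypothetical sigma vectors:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class SigmaAccumulationSketch {
    public static void main(String[] args) {
        int numParams = 2;
        SummaryStatistics[] sigmaEstimate = new SummaryStatistics[numParams];
        for (int i = 0; i < numParams; i++) {
            sigmaEstimate[i] = new SummaryStatistics();
        }

        // Hypothetical sigma vectors, one per Monte-Carlo run.
        RealVector[] runs = {
            new ArrayRealVector(new double[] { 0.10, 0.21 }),
            new ArrayRealVector(new double[] { 0.12, 0.19 }),
            new ArrayRealVector(new double[] { 0.11, 0.20 })
        };

        // Accumulate the per-parameter estimates, as in the loop above.
        for (RealVector sigma : runs) {
            for (int i = 0; i < numParams; i++) {
                sigmaEstimate[i].addValue(sigma.getEntry(i));
            }
        }
        System.out.println("mean sigma[0] = " + sigmaEstimate[0].getMean());
        System.out.println("mean sigma[1] = " + sigmaEstimate[1].getMean());
    }
}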

            final Point2D.Double p = obs[i];
            problem.addPoint(p.x, p.y);
        }

        // Direct solution (using simple regression).
        final RealVector regress = new ArrayRealVector(problem.solve(), false);

        // Dummy optimizer (to compute the chi-square).
        final LeastSquaresProblem lsp = builder(problem).build();

        // Get chi-square of the best parameters set for the given set of
        // observations.
        final double bestChi2N = getChi2N(lsp, regress);
        final RealVector sigma = lsp.evaluate(regress).getSigma(1e-14);

        // Monte-Carlo (generates a grid of parameters).
        final int mcRepeat = MONTE_CARLO_RUNS;
        final int gridSize = (int) FastMath.sqrt(mcRepeat);

        // Parameters found for each of Monte-Carlo run.
        // Index 0 = slope
        // Index 1 = offset
        // Index 2 = normalized chi2
        final List<double[]> paramsAndChi2 = new ArrayList<double[]>(gridSize * gridSize);

        final double slopeRange = 10 * sigma.getEntry(0);
        final double offsetRange = 10 * sigma.getEntry(1);
        final double minSlope = slope - 0.5 * slopeRange;
        final double minOffset = offset - 0.5 * offsetRange;
        final double deltaSlope = slopeRange / gridSize;
        final double deltaOffset = offsetRange / gridSize;
        for (int i = 0; i < gridSize; i++) {
            final double s = minSlope + i * deltaSlope;
            for (int j = 0; j < gridSize; j++) {
                final double o = minOffset + j * deltaOffset;
                final double chi2N = getChi2N(lsp,
                        new ArrayRealVector(new double[] {s, o}, false));

                paramsAndChi2.add(new double[] {s, o, chi2N});
            }
        }

        // Output (for use with "gnuplot").

        // Some info.

        // For plotting separately sets of parameters that have a large chi2.
        final double chi2NPlusOne = bestChi2N + 1;
        int numLarger = 0;

        final String lineFmt = "%+.10e %+.10e   %.8e\n";

        // Point with smallest chi-square.
        System.out.printf(lineFmt, regress.getEntry(0), regress.getEntry(1), bestChi2N);
        System.out.println(); // Empty line.

        // Points within the confidence interval.
        for (double[] d : paramsAndChi2) {
            if (d[2] <= chi2NPlusOne) {
                System.out.printf(lineFmt, d[0], d[1], d[2]);
            }
        }
        System.out.println(); // Empty line.

        // Points outside the confidence interval.
        for (double[] d : paramsAndChi2) {
            if (d[2] > chi2NPlusOne) {
                ++numLarger;
                System.out.printf(lineFmt, d[0], d[1], d[2]);
            }
        }
        System.out.println(); // Empty line.

        System.out.println("# sigma=" + sigma.toString());
        System.out.println("# " + numLarger + " sets filtered out");
    }
View Full Code Here
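getChi2N is defined elsewhere in the full test. Assuming it amounts to the sum of squared residuals divided by the degrees of freedom, a minimal sketch of such a normalized chi-square could look like this (the helper name and values are hypothetical, not from the test):

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class NormalizedChiSquareSketch {
    // Sum of squared residuals divided by the degrees of freedom.
    static double chi2N(RealVector residuals, int numParams) {
        double sumSq = residuals.dotProduct(residuals);
        return sumSq / (residuals.getDimension() - numParams);
    }

    public static void main(String[] args) {
        RealVector residuals = new ArrayRealVector(new double[] { 0.1, -0.2, 0.15, -0.05 });
        System.out.println(chi2N(residuals, 2)); // hypothetical 2-parameter model
    }
}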
