Package org.auraframework.test.perf.metrics

Examples of org.auraframework.test.perf.metrics.PerfMetrics


    // NOTE(review): fragment truncated by the code listing — the closing braces of the
    // if-block and method are not visible here; only comments were added below.
    @Override
    public void assertDiff(PerfMetrics actual, StringBuilder message) throws Exception {
        // Callers may pass null; lazily create the failure-message buffer in that case.
        if (message == null) {
            message = new StringBuilder();
        }
        PerfMetrics expected = readGoldFile();

        // Compare actual metrics against the gold-file baseline; a non-null result
        // is a human-readable description of the differences.
        String differentMessage = test.getPerfMetricsComparator().compare(expected, actual);
        if (differentMessage != null) {
            message.append(differentMessage);
            Assert.fail(message.toString());
View Full Code Here


public final class PerfGoldFilesUtilTest extends UnitTestCase {

    public void testPerfMetricsSerialization() throws Exception {
        // generate metrics gold file
        PerfMetrics metrics = new PerfMetrics();
        PerfMetric metric1 = new PerfMetric("metric1", 1);
        metric1.setDetails(new JSONArray("[{\"bytes\":\"3\"}]"));
        metrics.setMetric(metric1);
        metrics.setMetric(new PerfMetric("metric2", 2));
        String text = PerfGoldFilesUtil.toGoldFileText(metrics, true);

        // check the gold file is json-parseable
        JSONArray json = new JSONArray(text);
        assertEquals(3, json.length());

        // read metrics back
        PerfMetrics readMetrics = PerfGoldFilesUtil.fromGoldFileText(text);
        assertEquals(2, readMetrics.size());
        metric1 = readMetrics.getMetric("metric1");
        assertEquals(1, metric1.getIntValue());
        assertEquals(2, readMetrics.getMetric("metric2").getIntValue());

        JSONArray details = metric1.getDetails();
        assertEquals(3, details.getJSONObject(0).getInt("bytes"));
    }
View Full Code Here

    /**
     * Parses gold-file text (as produced by {@code toGoldFileText}) back into a
     * {@link PerfMetrics} object.
     * NOTE(review): fragment truncated by the code listing — the method's return
     * statement and closing brace are not visible here.
     *
     * @param text gold file contents
     * @return PerfMetrics from gold file contents
     * @throws IOException if reading the text fails
     */
    public static PerfMetrics fromGoldFileText(String text) throws IOException {
        PerfMetrics metrics = new PerfMetrics();
        BufferedReader reader = new BufferedReader(new StringReader(text));
        String line;
        PerfMetric lastMetric = null;
        while ((line = reader.readLine()) != null) {
            try {
                // Drop the leading character of each line — presumably the '[' or ','
                // of the surrounding JSON array — TODO confirm against toGoldFileText.
                line = line.substring(1);
                if (line.endsWith("]")) {
                    // Last line: also strip the closing ']' of the JSON array.
                    line = line.substring(0, line.length() - 1);
                }
                // A "<name>.details" entry belongs to the metric parsed on the previous
                // line; attach it rather than creating a new metric.
                if (lastMetric != null && line.startsWith("{\"" + lastMetric.getName() + ".details\":")) {
                    JSONObject details = new JSONObject(line);
                    lastMetric.setDetails(details.getJSONArray(lastMetric.getName() + ".details"));
                } else {
                    // Otherwise the line is a metric object of its own.
                    lastMetric = new PerfMetric(line);
                    metrics.setMetric(lastMetric);
                }
            } catch (JSONException e) {
                // Include the offending line in the failure for easier diagnosis.
                throw new RuntimeException(line, e);
            }
        }
View Full Code Here

    /**
     * Runs the perf test in several modes: an optional warmup run, then separate
     * run batches for Dev Tools timeline metrics, JavaScript profiling metrics and
     * Aura stats metrics. The median run of each batch is kept, all metrics are
     * combined, and result artifacts/gold files are written.
     * NOTE(review): fragment truncated by the code listing — the method's tail
     * (closing braces and any final verification) is not visible here.
     */
    private void runPerfTests() throws Throwable {
        int numPerfTimelineRuns = numPerfTimelineRuns();
        int numPerfProfileRuns = numPerfProfileRuns();
        int numPerfAuraRuns = numPerfAuraRuns();
        PerfMetrics timelineMetrics = null;
        PerfMetrics profileMetrics = null;
        PerfMetrics auraMetrics = null;
        int runNumber = 1;
        List<File> runFiles = Lists.newArrayList();

        if (runPerfWarmupRun()) {
            perfRunMode = PerfRunMode.WARMUP;
            // TODO: any metrics that should/could be measured for the first run
            try {
                perBrowserSetUp();
                superRunTest();
            } finally {
                perBrowserTearDown();
            }
        }

        // runs to collect Dev Tools performance metrics
        if (numPerfTimelineRuns > 0) {
            perfRunMode = PerfRunMode.TIMELINE;
            PerfRunsCollector runsCollector = new PerfRunsCollector();
            for (int i = 0; i < numPerfTimelineRuns; i++) {
                try {
                    perBrowserSetUp();

                    PerfMetricsCollector metricsCollector = new PerfMetricsCollector(this, perfRunMode);
                    metricsCollector.startCollecting();

                    superRunTest();

                    PerfMetrics metrics = metricsCollector.stopCollecting();
                    runsCollector.addRun(metrics);

                    // Only write per-run artifacts when INFO logging is enabled.
                    if (logger.isLoggable(Level.INFO)) {
                        runFiles.add(PerfResultsUtil.writeDevToolsLog(metrics.getDevToolsLog(), getGoldFileName() + '_'
                                + (i + 1),
                                auraUITestingUtil.getUserAgent()));
                        runFiles.add(PerfResultsUtil
                                .writeGoldFile(metrics, getGoldFileName() + '_' + runNumber++, true));
                    }
                } finally {
                    perBrowserTearDown();
                }
            }
            // use the median run for timeline metrics so individual metrics and dev tools logs match
            timelineMetrics = runsCollector.getMedianRun();
        }

        // runs to collect JavaScript profiling metrics, run separately because affect overall metrics
        if (numPerfProfileRuns > 0) {
            perfRunMode = PerfRunMode.PROFILE;
            PerfRunsCollector runsCollector = new PerfRunsCollector();
            for (int i = 0; i < numPerfProfileRuns; i++) {
                try {
                    perBrowserSetUp();

                    PerfMetricsCollector metricsCollector = new PerfMetricsCollector(this, perfRunMode);
                    metricsCollector.startCollecting();

                    superRunTest();

                    PerfMetrics metrics = metricsCollector.stopCollecting();
                    runsCollector.addRun(metrics);

                    if (logger.isLoggable(Level.INFO)) {
                        // Profiler data and heap snapshots may be absent; write only when present.
                        Map<String, ?> jsProfilerData = metrics.getJSProfilerData();
                        if (jsProfilerData != null) {
                            runFiles.add(PerfResultsUtil.writeJSProfilerData(jsProfilerData, getGoldFileName() + '_'
                                    + (i + 1)));
                        }
                        Map<String, ?> heapSnapshot = metrics.getHeapSnapshot();
                        if (heapSnapshot != null) {
                            runFiles.add(PerfResultsUtil.writeHeapSnapshot(heapSnapshot, getGoldFileName() + '_'
                                    + (i + 1)));
                        }
                        runFiles.add(PerfResultsUtil
                                .writeGoldFile(metrics, getGoldFileName() + '_' + runNumber++, true));
                    }
                } finally {
                    perBrowserTearDown();
                }
            }
            // use the median run for profile metrics so individual metrics and .cpuprofile match
            profileMetrics = runsCollector.getMedianRun();
        }

        // runs to collect Aura stats metrics
        if (numPerfAuraRuns > 0) {
            perfRunMode = PerfRunMode.AURASTATS;
            // collecting them in separate runs as they need STATS mode
            PerfRunsCollector runsCollector = new PerfRunsCollector();
            for (int i = 0; i < numPerfAuraRuns; i++) {
                try {
                    // TODO: set stats mode for framework tests
                    perBrowserSetUp();

                    PerfMetricsCollector metricsCollector = new PerfMetricsCollector(this, perfRunMode);
                    metricsCollector.startCollecting();

                    superRunTest();

                    PerfMetrics metrics = metricsCollector.stopCollecting();
                    runsCollector.addRun(metrics);
                } finally {
                    perBrowserTearDown();
                }
            }
            auraMetrics = runsCollector.getMedianMetrics();
        }

        // Mark perf collection finished before writing the combined results.
        perfRunMode = null;

        // combine all metrics, log/write results, perform tests
        PerfMetrics allMetrics = PerfMetrics.combine(timelineMetrics, profileMetrics, auraMetrics);
        if (allMetrics != null) {
            if (logger.isLoggable(Level.INFO)) {
                logger.info("perf metrics for " + this + '\n' + allMetrics.toLongString());
            }
            List<JSONObject> devToolsLog = allMetrics.getDevToolsLog();
            if (devToolsLog != null) {
                PerfResultsUtil.writeDevToolsLog(devToolsLog, getGoldFileName(), auraUITestingUtil.getUserAgent());
            }
            Map<String, ?> jsProfilerData = allMetrics.getJSProfilerData();
            if (jsProfilerData != null) {
                PerfResultsUtil.writeJSProfilerData(jsProfilerData, getGoldFileName());
            }
            Map<String, ?> heapSnapshot = allMetrics.getHeapSnapshot();
            if (heapSnapshot != null) {
                PerfResultsUtil.writeHeapSnapshot(heapSnapshot, getGoldFileName());
            }
            PerfResultsUtil.writeGoldFile(allMetrics, getGoldFileName(), storeDetailsInGoldFile());
View Full Code Here

    public void testButton() throws Exception {
        runWithPerfApp(getDefDescriptor("ui:button"));
    }

    /**
     * Asserts the expected perf metrics for ui:button and that the component was
     * actually rendered on the page.
     * NOTE(review): fragment truncated by the code listing — the method's closing
     * brace is not visible here.
     */
    private void verifyButton(PerfMetrics actual) {
        PerfMetrics expected = new PerfMetrics();
        // Timeline metrics
        expected.setMetric("Timeline.Rendering.Layout", 2);
        expected.setMetric("Timeline.Painting.Paint", 2); // button + image
        // Aura Stats metrics:
        expected.setMetric("Aura.CreateComponent.component.added", 9);
        expected.setMetric("Aura.RenderComponent.rerender.removed", 0);
        assertMetrics(expected, actual);

        // verify the component was loaded
        assertEquals("button loaded", LABEL_MOCK,
                AuraTextUtil.urldecode(currentDriver.findElement(By.cssSelector(".uiButton")).getText()));
View Full Code Here

        runWithPerfApp(getDefDescriptor("ui:label"));
    }

    /**
     * Asserts the expected perf metrics for ui:label and that the component was
     * actually rendered on the page.
     * NOTE(review): fragment truncated by the code listing — the method's closing
     * brace is not visible here.
     */
    private void verifyLabel(PerfMetrics actual) {
        // check expected metrics
        PerfMetrics expected = new PerfMetrics();
        expected.setMetric("Timeline.Rendering.Layout", 1);
        expected.setMetric("Timeline.Painting.Paint", 1);
        assertMetrics(expected, actual);

        // verify the component was loaded
        assertEquals("label loaded", LABEL_MOCK,
                AuraTextUtil.urldecode(currentDriver.findElement(By.cssSelector(".uiLabel")).getText()));
View Full Code Here

    private void verifyDummyPerf(PerfMetrics actual) {
        if (!SauceUtil.areTestsRunningOnSauce()) {
            logger.warning("skipping test because not running in SauceLabs: " + getName());
            return;
        }
        PerfMetrics expected = new PerfMetrics();
        expected.setMetric("Timeline.Rendering.Layout", 2);
        expected.setMetric("Timeline.Painting.Paint", 3);
        assertMetrics(expected, actual);
    }
View Full Code Here

        PerfRunsCollector runs = new PerfRunsCollector();
        for (int i = 0; i < 2; i++) {
            PerfMetricsCollector perfData = new PerfMetricsCollector(this, PerfRunMode.TIMELINE);
            perfData.startCollecting();
            open("/ui/label.cmp?label=foo");
            PerfMetrics metrics = perfData.stopCollecting();
            runs.addRun(metrics);
        }
        runs.show(System.out);
    }
View Full Code Here

        for (int i = 0; i < 2; i++) {
            getDriver();
            PerfMetricsCollector perfData = new PerfMetricsCollector(this, PerfRunMode.TIMELINE);
            perfData.startCollecting();
            open("/ui/label.cmp?label=foo");
            PerfMetrics metrics = perfData.stopCollecting();
            runs.addRun(metrics);
            quitDriver();
        }
        runs.show(System.out);
    }
View Full Code Here

TOP

Related Classes of org.auraframework.test.perf.metrics.PerfMetrics

Copyright © 2018 www.massapicom. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.