Package com.sun.faban.common

Examples of com.sun.faban.common.TextTable
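
TextTable lays tabular data out as aligned plain text. A minimal sketch of the API as it is used in the examples on this page (the constructor takes row and column counts, setHeader and setField address cells by zero-based index, and format writes the finished table to a target such as a StringBuffer or System.out, both of which appear below):

    import com.sun.faban.common.TextTable;

    public class TextTableDemo {
        public static void main(String[] args) {
            // Two rows, two columns; header cells are set separately.
            TextTable table = new TextTable(2, 2);
            table.setHeader(0, "Parameter");
            table.setHeader(1, "Value");
            table.setField(0, 0, "curr_connections");
            table.setField(0, 1, "42");
            table.setField(1, 0, "bytes_read");
            table.setField(1, 1, "1048576");
            // format(...) writes the formatted table to the given target;
            // System.out is used the same way in the last example below.
            table.format(System.out);
        }
    }

The first example, taken from a memcached statistics collector, builds one two-column table per server: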


        //logger.info("Map size returning is " + memcacheStats.size());

        //produce a TextTable for each server listed
        Map<String, TextTable> returnMap = new HashMap<String, TextTable>();

        TextTable outputTextTable = null;

        Set<Map.Entry> statEntries = memcacheStats.entrySet();

        //set counter to allow to set number of columns to output
        for (Map.Entry statEntry : statEntries) {
            String key = (String) statEntry.getKey();
            Map statsMap = (Map) statEntry.getValue();
            //is this case, it is a Map with the statistics
            //get size so we know how big to make TextTable
            outputTextTable = new TextTable(statsMap.size(), 2);
            //set Header
            outputTextTable.setHeader(0, "Parameter");
            outputTextTable.setHeader(1, "Value");
            //outputTextTable.setHeader(2, "for " + key);
            //get this value's iterator
            Set<Map.Entry> statsMapEntries = statsMap.entrySet();
            int counter=0;
            for (Map.Entry statsMapEntry : statsMapEntries) {
                outputTextTable.setField(counter, 0,
                        (CharSequence) statsMapEntry.getKey());
                outputTextTable.setField(counter++, 1,
                        (CharSequence) statsMapEntry.getValue());
            }
            //add each TextTable for each server listed to return Map.
            returnMap.put(key, outputTextTable);
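
If the fragment above is the body of a method that returns returnMap, the per-server tables can be printed with format, as in the last example on this page (a sketch):

    for (Map.Entry<String, TextTable> entry : returnMap.entrySet()) {
        System.out.println("Server: " + entry.getKey());
        entry.getValue().format(System.out);
    }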


A variant that folds all servers into a single table, with one value column per server:

        String elapsedSecs = String.format("%.3f", elapsed / 1000d);

        // cache.stats() returns a Map whose key is the name of the memcached
        // server and whose value is a Map of that server's statistics.
        Map memcacheStats = cache.stats();
        TextTable outputTextTable = null;
        Set<Map.Entry> serverEntries = memcacheStats.entrySet();

        int counter = 0;      // row index within a column
        int columnIndex = 0;  // one value column per server

        for (Map.Entry serverEntry : serverEntries) {
            String key = (String) serverEntry.getKey();
            Map statsMap = (Map) serverEntry.getValue();
            if (outputTextTable == null) {
                // Rows: one per statistic. Columns: one per server, plus two
                // extra columns for the elapsed time and the parameter name.
                outputTextTable = new TextTable(statsMap.size(),
                        serverEntries.size() + 2);
            }
            // Set the headers; each server gets its own value column.
            outputTextTable.setHeader(0, "Elapsed (sec)");
            outputTextTable.setHeader(1, "Parameter");
            outputTextTable.setHeader(columnIndex + 2, key);

            Set<Map.Entry> statsMapEntries = statsMap.entrySet();
            counter = 0; // restart at the first row for this server's column

            // Populate this server's column.
            for (Map.Entry statsMapEntry : statsMapEntries) {
                outputTextTable.setField(counter, 0, elapsedSecs);
                outputTextTable.setField(counter, 1,
                        (CharSequence) statsMapEntry.getKey());
                outputTextTable.setField(counter++, columnIndex + 2,
                        (CharSequence) statsMapEntry.getValue());
            }
            ++columnIndex;
        }
        return outputTextTable;
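
The longer example below builds several TextTables from Hadoop GridMix log records and writes them into Faban .xan report files. Each table is preceded by plain-text directives: "Title:" and "Section:" name the report and its sections, and "Display: Line" marks a section to be rendered as a line chart. A minimal sketch of the write pattern, with outDir, host, and table as hypothetical stand-ins and error handling omitted (the directives themselves are copied from the snippet):

    // java.io.BufferedWriter and java.io.FileWriter assumed imported;
    // outDir, host, and table are hypothetical stand-ins.
    BufferedWriter out = new BufferedWriter(
            new FileWriter(outDir + "example_metrics.xan." + host));
    out.write("Title: Example Metric Results\n");
    out.write("\nSection: Example Metric\n");
    out.write("Display: Line\n"); // render this section as a line chart
    out.write(table.format(new StringBuffer()).toString());
    out.close();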

                {
                    smallestList = mapOutputRecords.size();
                }
             
                // First, build the job metrics table: one row per sample.
                TextTable jobTable = new TextTable(smallestList, 11);
               
                //set the headers
                jobTable.setHeader(0, "Time");
                jobTable.setHeader(1, "Data local map tasks");
                jobTable.setHeader(2, "Map input records");
                jobTable.setHeader(3, "Combine output records");
                jobTable.setHeader(4, "Output bytes");
                jobTable.setHeader(5, "Input bytes");
                jobTable.setHeader(6, "HDFS bytes read");
                jobTable.setHeader(7, "Launched map tasks");
                jobTable.setHeader(8, "Combine input records");
                jobTable.setHeader(9, "Local bytes written");
                jobTable.setHeader(10, "Map output records");


                TextTable outputTime = new TextTable(smallestList, 2);
                outputTime.setHeader(0, "Seconds");
                outputTime.setHeader(1, "Map Output bytes/sec");

                TextTable inputTime = new TextTable(smallestList, 2);
                inputTime.setHeader(0, "Seconds");
                inputTime.setHeader(1, "Map Input bytes/sec");

                TextTable outputPeak = new TextTable(1,3);
                outputPeak.setHeader(0, "Timestamp");
                outputPeak.setHeader(1, "Peak Map Output Bytes rate");
                outputPeak.setHeader(2, "Sample point (seconds)");

                TextTable inputPeak = new TextTable(1,3);
                inputPeak.setHeader(0, "Timestamp");
                inputPeak.setHeader(1, "Peak Map Input Bytes rate");
                inputPeak.setHeader(2, "Sample point (seconds)");



                int row = 0;
                int counter = jobRecordArray.size();
                int peakInputByteRate = 0;
                int peakOutputByteRate = 0;
                int runPeakOutputByteRate = 0;
                int runPeakInputByteRate = 0;
                int peakInputSamplePoint = 0;
                int peakOutputSamplePoint = 0;
                String peakInputTimestamp = null;
                String peakOutputTimestamp = null;

                //all of these are placed outside of the loop for efficiency
                String currentOutputBytes = null;
                String previousOutputBytes = null;
                String currentInputBytes = null;
                String previousInputBytes = null;
                String currentOutputRate = null;
                String currentInputRate = null;
               
                for (row = 0; row < smallestList; row++)
                {
                    // Samples are taken at 10-second intervals, so the
                    // relative time of row n is (n + 1) * 10 seconds.
                    jobTable.setField(row, 0, String.valueOf((row + 1) * 10));
                    jobTable.setField(row, 1, dataLocalMapTasks.get(row).getValue());
                    jobTable.setField(row, 2, mapInputRecords.get(row).getValue());
                    jobTable.setField(row, 3, combineOutputRecords.get(row).getValue());
                    jobTable.setField(row, 4, outputBytes.get(row).getValue());
                    jobTable.setField(row, 5, inputBytes.get(row).getValue());
                    jobTable.setField(row, 6, hdfsRead.get(row).getValue());
                    jobTable.setField(row, 7, launchedTasks.get(row).getValue());
                    jobTable.setField(row, 8, combineInputRecords.get(row).getValue());
                    jobTable.setField(row, 9, localBytesWritten.get(row).getValue());
                    jobTable.setField(row, 10, mapOutputRecords.get(row).getValue());

                    outputTime.setField(row, 0, String.valueOf((row + 1) * 10));
                    inputTime.setField(row, 0, String.valueOf((row + 1) * 10));

                    if (row > 0)
                    {
                        // Skip the rate computation when either output sample
                        // is zero; no point wasting time formatting and
                        // subtracting the strings.
                        if ((outputBytes.get(row - 1).getValue().compareTo("0") != 0) && (outputBytes.get(row).getValue().compareTo("0") != 0))
                        {
                            currentOutputBytes = outputBytes.get(row).getValue();
                            previousOutputBytes = outputBytes.get(row - 1).getValue();
                            currentInputBytes = inputBytes.get(row).getValue();
                            previousInputBytes = inputBytes.get(row - 1).getValue();
                            // Divide the delta by the 10-second interval; the
                            // helpers operate on String-encoded numbers.
                            currentOutputRate = stringDivide(stringSubtract(currentOutputBytes, previousOutputBytes), "10");
                            currentInputRate = stringDivide(stringSubtract(currentInputBytes, previousInputBytes), "10");

                            outputTime.setField(row, 1, currentOutputRate);
                            inputTime.setField(row, 1, currentInputRate);

                            // Track the peak rates and when they occurred.
                            if (Integer.parseInt(currentOutputRate) > peakOutputByteRate)
                            {
                                peakOutputByteRate = Integer.parseInt(currentOutputRate);
                                Date timestamp = dataLocalMapTasks.get(row).getTimestamp();
                                if (timestamp != null)
                                    peakOutputTimestamp = newdateFormat.format(timestamp);
                                peakOutputSamplePoint = (row + 1) * 10;
                            }

                            if (Integer.parseInt(currentInputRate) > peakInputByteRate)
                            {
                                peakInputByteRate = Integer.parseInt(currentInputRate);
                                Date timestamp = dataLocalMapTasks.get(row).getTimestamp();
                                if (timestamp != null)
                                    peakInputTimestamp = newdateFormat.format(timestamp);
                                peakInputSamplePoint = (row + 1) * 10;
                            }
                        }
                    } else
                    {
                        // First sample: use the raw value over one interval.
                        outputTime.setField(row, 1, stringDivide(formatHadoopStringNumber(outputBytes.get(row).getValue()), "10"));
                        inputTime.setField(row, 1, stringDivide(formatHadoopStringNumber(inputBytes.get(row).getValue()), "10"));
                    }
                }

                // Fill in the peak-value tables, using the recorded peak
                // timestamps when available.
                inputPeak.setField(0, 0, peakInputTimestamp != null ? peakInputTimestamp : "N/A");
                inputPeak.setField(0, 1, String.valueOf(peakInputByteRate));
                inputPeak.setField(0, 2, String.valueOf(peakInputSamplePoint));

                outputPeak.setField(0, 0, peakOutputTimestamp != null ? peakOutputTimestamp : "N/A");
                outputPeak.setField(0, 1, String.valueOf(peakOutputByteRate));
                outputPeak.setField(0, 2, String.valueOf(peakOutputSamplePoint));

                //write the file
                try
                {
                    BufferedWriter bfw_metrics = new BufferedWriter(new FileWriter(RunContext.getOutDir() + "hadoop_metrics_jobRecords.xan." + host));

                    StringBuffer pageBuffer = new StringBuffer();
                    StringBuffer timeInputBuffer = new StringBuffer();
                    StringBuffer timeOutputBuffer = new StringBuffer();
                    StringBuffer peakInputBuffer= new StringBuffer();
                    StringBuffer peakOutputBuffer = new StringBuffer();

                    StringBuffer inputBuffer = new StringBuffer();
                    StringBuffer outputBuffer = new StringBuffer();

                    StringBuffer heading = new StringBuffer();

                    heading.append("Title:" + "Hadoop JobRecord Metric Results");
                    heading.append("\n");

                    pageBuffer.append("\n");
                    pageBuffer.append("\n");
                    pageBuffer.append("Section:" + "Hadoop JobRecord Metric");
                    pageBuffer.append("\n");

                    inputBuffer.append("\n");
                    inputBuffer.append("\n");
                    inputBuffer.append("Section: Map Input bytes/sec\n");
                    inputBuffer.append("Display: Line\n");

                    outputBuffer.append("\n");
                    outputBuffer.append("\n");
                    outputBuffer.append("Section: Map Output bytes/sec\n");
                    outputBuffer.append("Display: Line\n");


                    peakInputBuffer.append("\n");
                    peakInputBuffer.append("\n");
                    peakInputBuffer.append("Section: Peak Map Input Bytes rate\n");
                    //peakInputBuffer.append("Display: Line\n");

                    peakOutputBuffer.append("\n");
                    peakOutputBuffer.append("\n");
                    peakOutputBuffer.append("Section: Peak Map Output Bytes rate\n");
                    //peakOutputBuffer.append("Display: Line\n");

                  
                    bfw_metrics.write(heading.toString());
                    bfw_metrics.write(inputBuffer.toString());
                    bfw_metrics.write(inputTime.format(timeInputBuffer).toString());
                    bfw_metrics.write(outputBuffer.toString());
                    bfw_metrics.write(outputTime.format(timeOutputBuffer).toString());

                   
                    bfw_metrics.write(inputPeak.format(peakInputBuffer).toString());
                   
                    bfw_metrics.write(outputPeak.format(peakOutputBuffer).toString());

                    bfw_metrics.write(jobTable.format(pageBuffer).toString());

                    bfw_metrics.close();
                    bfw_metrics = null;
                    logger.info("File written " + RunContext.getOutDir() + "hadoop_metrics_jobRecords." +host);

                }catch(IOException ioe)
                {
                    logger.warning("Could not create logrecords output file (hadoop_metrics_jobRecords) in generateTableFromMetrics" + ioe.getMessage());
                }

            }else
            {
                logger.warning("jobArray was empty or null in GridMix.generateTableFromMetrics()");
            }

            if((jobTrackerArray != null)&&(jobTrackerArray.size() > 0))
            {
                for(Iterator<AbstractLogRecord> it = jobTrackerArray.iterator(); it.hasNext();)
                {
                    JobtrackerRecord jr = (JobtrackerRecord)it.next();
                    jobRecordTrackerArray.add(jr);
                }

                // Next, build the jobTracker tables.
                TextTable jobTrackerTable = new TextTable(jobRecordTrackerArray.size(), 5);

                //set the headers
                jobTrackerTable.setHeader(0, "Time");
                jobTrackerTable.setHeader(1, "Maps launched");
                jobTrackerTable.setHeader(2, "Maps finished");
                jobTrackerTable.setHeader(3, "Reducers launched");
                jobTrackerTable.setHeader(4, "Reducers finished");


                TextTable mapLaunched = new TextTable(jobRecordTrackerArray.size(),2);
                mapLaunched.setHeader(0, "Relative Time");
                mapLaunched.setHeader(1, "Maps Launched/sec");

                TextTable mapFinished = new TextTable(jobRecordTrackerArray.size(),2);
                mapFinished.setHeader(0, "Relative Time");
                mapFinished.setHeader(1, "Maps Finished/sec");
                        
                for(int row = 0; row < jobRecordTrackerArray.size();row++)
                {                   
                    jobTrackerTable.setField(row, 0, "N/A");
                    jobTrackerTable.setField(row, 1, jobRecordTrackerArray.get(row).getMaps_launched());
                    jobTrackerTable.setField(row, 2, jobRecordTrackerArray.get(row).getMaps_completed());
                    jobTrackerTable.setField(row, 3, jobRecordTrackerArray.get(row).getReduces_launched());
                    jobTrackerTable.setField(row, 4, jobRecordTrackerArray.get(row).getReduces_completed());

                    // Set the relative time; samples are taken at 10-second
                    // intervals.
                    mapLaunched.setField(row, 0, String.valueOf((row + 1) * 10));
                    mapFinished.setField(row, 0, String.valueOf((row + 1) * 10));
                    if (row > 0)
                    {
                        // Per-interval rate: the delta from the previous sample.
                        mapLaunched.setField(row, 1, stringSubtract(jobRecordTrackerArray.get(row).getMaps_launched(), jobRecordTrackerArray.get(row - 1).getMaps_launched()));
                        mapFinished.setField(row, 1, stringSubtract(jobRecordTrackerArray.get(row).getMaps_completed(), jobRecordTrackerArray.get(row - 1).getMaps_completed()));
                    } else
                    {
                        mapLaunched.setField(row, 1, jobRecordTrackerArray.get(row).getMaps_launched());
                        mapFinished.setField(row, 1, jobRecordTrackerArray.get(row).getMaps_completed());
                    }

                   
                }

                //write the file
                try
                {
                    BufferedWriter bfw_metrics = new BufferedWriter(new FileWriter(RunContext.getOutDir() + "hadoop_metrics_jobTrackerRecords.xan."+host));
                    StringBuffer sb = new StringBuffer();
                    StringBuffer sbl = new StringBuffer();
                    StringBuffer sbf = new StringBuffer();

                    StringBuffer launched = new StringBuffer();
                    StringBuffer finished = new StringBuffer();

                    StringBuffer heading = new StringBuffer();

                    heading.append("Title:" + "Hadoop JobTrackerRecord Metric Results");

                    sb.append("\n");
                    sb.append("\n");
                    sb.append("Section: JobTrackerRecord Metrics\n");


                    launched.append("\n");
                    launched.append("\n");
                    launched.append("Section: Launched Map Metrics\n");
                    launched.append("Display: Line\n");

                    finished.append("\n");
                    finished.append("\n");
                    finished.append("Section: Finished Map Metrics\n");
                    finished.append("Display: Line\n");

                    bfw_metrics.write(heading.toString());
                    bfw_metrics.write(launched.toString());
                    bfw_metrics.write(mapLaunched.format(sbl).toString());
                    bfw_metrics.write(finished.toString());
                    bfw_metrics.write(mapFinished.format(sbf).toString());
                    bfw_metrics.write(jobTrackerTable.format(sb).toString());
                    bfw_metrics.close();
                    bfw_metrics = null;
                    logger.info("File written " + RunContext.getOutDir() + "hadoop_metrics_jobTrackerRecords." +host);

                }catch(IOException ioe)
                {
                    logger.warning("Could not create logrecords output file (hadoop_metrics_jobTrackerRecords) in generateTableFromMetrics" + ioe.getMessage());
                }
            }else
            {
                logger.warning("jobTrackerArray was empty or null in GridMix.generateTableFromMetrics()");
            }
           
            if((shuffleInputArray != null) && (shuffleInputArray.size() > 0))
            {
                for(Iterator<AbstractLogRecord> it = shuffleInputArray.iterator(); it.hasNext();)
                {
                    ShuffleInputRecord jr = (ShuffleInputRecord)it.next();
                    shuffleInputRecordArray.add(jr);
                }

                // Next, build the shuffle-input tables.
                TextTable shuffleInputTable = new TextTable(shuffleInputArray.size(), 5);
                //set the headers
                shuffleInputTable.setHeader(0, "Relative Time");               
                shuffleInputTable.setHeader(1, "Shuffle failed fetches");
                shuffleInputTable.setHeader(2, "Shuffle fetchers busy percent");
                shuffleInputTable.setHeader(3, "Shuffle input bytes");
                shuffleInputTable.setHeader(4, "Shuffle success fetches");


                TextTable failed = new TextTable(shuffleInputArray.size(), 2);
                failed.setHeader(0, "Relative Time");
                failed.setHeader(1, "Failed fetches");

                TextTable busy = new TextTable(shuffleInputArray.size(), 2);
                busy.setHeader(0, "Relative Time");
                busy.setHeader(1, "Fetches % Busy");

                TextTable input = new TextTable(shuffleInputArray.size(), 2);
                input.setHeader(0, "Relative Time");
                input.setHeader(1, "Input Bytes/sec");

                TextTable success = new TextTable(shuffleInputArray.size(), 2);
                success.setHeader(0, "Relative Time");
                success.setHeader(1, "Success fetches");
                               
                String currentValue;
                String previousValue;

                for (int row = 0; row < shuffleInputRecordArray.size(); row++)
                {
                    shuffleInputTable.setField(row, 0, String.valueOf((row + 1) * 10));
                    shuffleInputTable.setField(row, 1, shuffleInputRecordArray.get(row).getShuffle_failed_fetches());
                    shuffleInputTable.setField(row, 2, shuffleInputRecordArray.get(row).getShuffle_fetchers_busy_percent());
                    shuffleInputTable.setField(row, 3, shuffleInputRecordArray.get(row).getShuffle_input_bytes());
                    shuffleInputTable.setField(row, 4, shuffleInputRecordArray.get(row).getShuffle_success_fetches());

                    failed.setField(row, 0, String.valueOf((row + 1) * 10));
                    failed.setField(row, 1, shuffleInputRecordArray.get(row).getShuffle_failed_fetches());

                    busy.setField(row, 0, String.valueOf((row + 1) * 10));
                    busy.setField(row, 1, shuffleInputRecordArray.get(row).getShuffle_fetchers_busy_percent());

                    input.setField(row, 0, String.valueOf((row + 1) * 10));
                    if (row != 0)
                    {
                        // Bytes/sec over the 10-second interval.
                        currentValue = shuffleInputRecordArray.get(row).getShuffle_input_bytes();
                        previousValue = shuffleInputRecordArray.get(row - 1).getShuffle_input_bytes();
                        input.setField(row, 1, stringDivide(stringSubtract(currentValue, previousValue), "10"));
                    } else
                    {
                        input.setField(row, 1, stringDivide(shuffleInputRecordArray.get(row).getShuffle_input_bytes(), "10"));
                    }

                    success.setField(row, 0, String.valueOf((row + 1) * 10));
                    success.setField(row, 1, shuffleInputRecordArray.get(row).getShuffle_success_fetches());
                }

                //write the file
                try
                {
                    BufferedWriter bfw_metrics = new BufferedWriter(new FileWriter(RunContext.getOutDir() + "hadoop_metrics_shuffleInputRecords.xan."+host));
                    StringBuffer sb = new StringBuffer();
                    StringBuffer sbf = new StringBuffer();
                    StringBuffer sbb = new StringBuffer();
                    StringBuffer sbi = new StringBuffer();
                    StringBuffer sbs = new StringBuffer();

                    StringBuffer failedsb = new StringBuffer();
                    StringBuffer busysb = new StringBuffer();
                    StringBuffer inputsb = new StringBuffer();
                    StringBuffer successdb = new StringBuffer();
                    StringBuffer heading = new StringBuffer();



                    heading.append("Title:" + "Hadoop ShuffleInput Metric Results");                                       

                    sb.append("\n");
                    sb.append("\n");
                    sb.append("Section: Hadoop ShuffleInput Metric Data\n");

                    failedsb.append("\n");
                    failedsb.append("\n");
                    failedsb.append("Section: Hadoop ShuffleInput Failed Data\n");
                    failedsb.append("Display: Line\n");

                    busysb.append("\n");
                    busysb.append("\n");
                    busysb.append("Section: Hadoop ShuffleInput Busy Data\n");
                    busysb.append("Display: Line\n");

                    inputsb.append("\n");
                    inputsb.append("\n");
                    inputsb.append("Section: Hadoop ShuffleInput Input Data\n");
                    inputsb.append("Display: Line\n");

                    successdb.append("\n");
                    successdb.append("\n");
                    successdb.append("Section: Hadoop ShuffleInput Successful Fetches Data\n");
                    successdb.append("Display: Line\n");

                    bfw_metrics.write(heading.toString());
                    bfw_metrics.write(failedsb.toString());
                    bfw_metrics.write(failed.format(sbf).toString());

                    bfw_metrics.write(inputsb.toString());
                    bfw_metrics.write(input.format(sbi).toString());

                    bfw_metrics.write(busysb.toString());
                    bfw_metrics.write(busy.format(sbb).toString());
                
                    bfw_metrics.write(successdb.toString());
                    bfw_metrics.write(success.format(sbs).toString());

                    bfw_metrics.write(shuffleInputTable.format(sb).toString());
                   
                    bfw_metrics.close();
                    bfw_metrics = null;
                    logger.info("File written " + RunContext.getOutDir() + "hadoop_metrics_shuffleInputRecords." +host);

                }catch(IOException ioe)
                {
                    logger.warning("Could not create logrecords output file (hadoop_metrics_shuffleInputRecords) in generateTableFromMetrics" + ioe.getMessage());
                }

            }else
            {
                logger.warning("shuffleInputArray was empty or null in GridMix.generateTableFromMetrics()");
            }

            if((shuffleOutputArray != null) && (shuffleOutputArray.size() > 0))
            {
                for(Iterator<AbstractLogRecord> it = shuffleOutputArray.iterator(); it.hasNext();)
                {
                    ShuffleOutputRecord jr = (ShuffleOutputRecord)it.next();
                    shuffleOutputRecordArray.add(jr);
                }

                // Next, build the shuffle-output tables.
                TextTable shuffleOutputTable = new TextTable(shuffleOutputArray.size(), 5);
                // Set the headers.
                shuffleOutputTable.setHeader(0, "Relative Time");
                shuffleOutputTable.setHeader(1, "Shuffle failed outputs");
                shuffleOutputTable.setHeader(2, "Shuffle handler busy percent");
                shuffleOutputTable.setHeader(3, "Shuffle output bytes");
                shuffleOutputTable.setHeader(4, "Shuffle success outputs");

                TextTable failed = new TextTable(shuffleOutputArray.size(), 2);
                failed.setHeader(0, "Relative Time");
                failed.setHeader(1, "Shuffle failed outputs");

                TextTable busy = new TextTable(shuffleOutputArray.size(), 2);
                busy.setHeader(0, "Relative Time");
                busy.setHeader(1, "Shuffle handler % busy");

                TextTable output = new TextTable(shuffleOutputArray.size(), 2);
                output.setHeader(0, "Relative Time");
                output.setHeader(1, "Shuffle output bytes/sec");

                TextTable success = new TextTable(shuffleOutputArray.size(), 2);
                success.setHeader(0, "Relative Time");
                success.setHeader(1, "Shuffle success outputs");

                for (int row = 0; row < shuffleOutputRecordArray.size(); row++)
                {
                    shuffleOutputTable.setField(row, 0, String.valueOf((row + 1) * 10));
                    shuffleOutputTable.setField(row, 1, shuffleOutputRecordArray.get(row).getShuffle_failed_outputs());
                    shuffleOutputTable.setField(row, 2, shuffleOutputRecordArray.get(row).getShuffle_handler_busy_percent());
                    shuffleOutputTable.setField(row, 3, shuffleOutputRecordArray.get(row).getShuffle_output_bytes());
                    shuffleOutputTable.setField(row, 4, shuffleOutputRecordArray.get(row).getShuffle_success_outputs());

                    failed.setField(row, 0, String.valueOf((row + 1) * 10));
                    busy.setField(row, 0, String.valueOf((row + 1) * 10));
                    output.setField(row, 0, String.valueOf((row + 1) * 10));
                    success.setField(row, 0, String.valueOf((row + 1) * 10));

                    failed.setField(row, 1, shuffleOutputRecordArray.get(row).getShuffle_failed_outputs());
                    busy.setField(row, 1, shuffleOutputRecordArray.get(row).getShuffle_handler_busy_percent());
                    // Bytes/sec over the 10-second interval, computed the
                    // same way as for the shuffle-input table.
                    if (row != 0)
                    {
                        output.setField(row, 1, stringDivide(stringSubtract(shuffleOutputRecordArray.get(row).getShuffle_output_bytes(), shuffleOutputRecordArray.get(row - 1).getShuffle_output_bytes()), "10"));
                    } else
                    {
                        output.setField(row, 1, stringDivide(shuffleOutputRecordArray.get(row).getShuffle_output_bytes(), "10"));
                    }

                    success.setField(row, 1, shuffleOutputRecordArray.get(row).getShuffle_success_outputs());
                }

                //write the file
                try
                {
                    BufferedWriter bfw_metrics = new BufferedWriter(new FileWriter(RunContext.getOutDir() + "hadoop_metrics_shuffleOutputRecords.xan."+host));
                    StringBuffer sb = new StringBuffer();
                    StringBuffer sbf = new StringBuffer();
                    StringBuffer sbb = new StringBuffer();
                    StringBuffer sbo = new StringBuffer();
                    StringBuffer sbs = new StringBuffer();

                    StringBuffer failed_sb = new StringBuffer();
                    StringBuffer busy_sb = new StringBuffer();
                    StringBuffer output_sb = new StringBuffer();
                    StringBuffer success_sb = new StringBuffer();

                    StringBuffer heading = new StringBuffer();

                    heading.append("Title:" + "Hadoop ShuffleOutput Metric Results");

                    sb.append("\n");
                    sb.append("\n");
                    sb.append("Section: ShuffleOutput Metrics\n");                   

                    failed_sb.append("\n");
                    failed_sb.append("\n");
                    failed_sb.append("Section: ShuffleOutput Failed Metrics\n");
                    failed_sb.append("Display: Line\n");

                    busy_sb.append("\n");
                    busy_sb.append("\n");
                    busy_sb.append("Section: ShuffleOutput % Busy Metrics\n");
                    busy_sb.append("Display: Line\n");

                    output_sb.append("\n");
                    output_sb.append("\n");
                    output_sb.append("Section: ShuffleOutput Output Metrics\n");
                    output_sb.append("Display: Line\n");

                    success_sb.append("\n");
                    success_sb.append("\n");
                    success_sb.append("Section: ShuffleOutput Successful Shuffle Metrics\n");
                    success_sb.append("Display: Line\n");


                    bfw_metrics.write(heading.toString());
                    bfw_metrics.write(failed_sb.toString());
                    bfw_metrics.write(failed.format(sbf).toString());

                    bfw_metrics.write(busy_sb.toString());
                    bfw_metrics.write(busy.format(sbb).toString());

                    bfw_metrics.write(output_sb.toString());
                    bfw_metrics.write(output.format(sbo).toString());

                    bfw_metrics.write(success_sb.toString());
                    bfw_metrics.write(success.format(sbs).toString());


                    bfw_metrics.write(shuffleOutputTable.format(sb).toString());
                    bfw_metrics.close();
                    bfw_metrics = null;

Another fragment, sizing one row per server for each collection interval:

            if (outputTextTable == null) {
                // Rows: one per server for each interval. Columns: the
                // NUM_COLS statistics gathered, plus one extra column for
                // the server name.
                outputTextTable = new TextTable(serverEntries.size(), NUM_COLS + 1);

                // Set the headers.
                outputTextTable.setHeader(0, "Server");
                outputTextTable.setHeader(CURTIME, "Time");
The largest example is Faban's run comparator, which assembles side-by-side tables for several runs and pads shorter series with "-", the null value for the .xan format:

     * @throws IOException if a run directory or certain files in it can't be
     *         accessed, or a file's format is incorrect
     */
    public void compare(String runDirs[], String outDir) throws IOException {
        String outFile = outDir + File.separator + "compare.xan";
        TextTable infoTable, thruTable, opAvgThruTable = null, opThruTable;
        TextTable respTable, percentRespTable[] = null, avgRespTable = null, cpuTable = null;
        String thruMetric = null, respMetric = null;
        PrintWriter p;
        ArrayList<String> rtAvgList;
        ArrayList<String> rtPercentNames = null;
        ArrayList<String> opNames = new ArrayList<String>();

        ArrayList<Double> thruList[] = new ArrayList[runDirs.length];
        ArrayList<Double> opThruList[][] = null;
        ArrayList<Double> timeVals = new ArrayList<Double>();
        ArrayList<Double> timeDistVals = new ArrayList<Double>();
        List<String> cpuList[];
        int maxThruRows = 0, maxDistRows = 0;
        ArrayList<Double> respList[][] = null;
        ArrayList<Integer> respDistList[][] = null;
        infoTable = new TextTable(runDirs.length, 4);
        infoTable.setHeader(0, "RunID");
        infoTable.setHeader(1, "Avg. Throughput");
        infoTable.setHeader(2, "Passed");
        infoTable.setHeader(3, "Description");


        p = openOutFile(outFile, "Compare");
        for (int i = 0; i < runDirs.length; i++) {
            String sumFile = getSumFile(runDirs[i]);
            String detFile = getDetFile(runDirs[i]);

            reader = new XMLReader(sumFile, false, false);

            // parse Run Info section of summary file
            getRunInfo(runDirs[i], reader, infoTable, i);

            // parse throughput section of detail.xan
            DetailReport detail = new DetailReport(detFile);
            thruList[i] = detail.getThruput();
            ArrayList<Double> thisTimeVals = detail.getTimes();

            if (thruList[i].size() > maxThruRows) {
                maxThruRows = thruList[i].size();
                timeVals = thisTimeVals;
            }
            ArrayList<Double> thisDistTimeVals = detail.getTimesDist();
            if (thisDistTimeVals.size() > maxDistRows) {
                maxDistRows = thisDistTimeVals.size();
                timeDistVals = thisDistTimeVals;
            }

            // read the metric from the first file
            if (i == 0) {
                respMetric = getRespUnit(reader);
                thruMetric = getThruUnit(reader);
            }

            // Now get the response times
            rtAvgList = new ArrayList<String>();
            rtPercentNames = new ArrayList<String>();
            try {
                getResponseTimes(reader, opNames, rtAvgList, rtPercentNames);
            } catch (IOException ie) {
                throw new IOException(ie.getMessage() + " : " + sumFile);
            }

            // Get cpu util.
            cpuList = getCpuUtil(Config.OUT_DIR + runDirs[i]);

            if (i == 0) {
                respList = new ArrayList[runDirs.length][opNames.size()];
                respDistList = new ArrayList[runDirs.length][opNames.size()];

                // thru. table on a per operation basis
                opAvgThruTable = new TextTable(runDirs.length, opNames.size() + 1);
                opAvgThruTable.setHeader(0, "RunID");

                opThruList = new ArrayList[runDirs.length][opNames.size()];

                /*
                 * We have the following types of RT tables:
                 * a) Avg. RT which simply has one row per run listing avg RT. of each operation
                 * b) nth Percentile RT tables - same info as above
                 * c) Detailed RT - one table per operation listing RT over time
                 */
                avgRespTable = new TextTable(runDirs.length, opNames.size() + 1);
                avgRespTable.setHeader(0, "RunID");

                //percentile resp. tables (e.g. 90th, 99th)
                percentRespTable = new TextTable[rtPercentNames.size()];
                for (int j = 0; j < rtPercentNames.size(); j++) {
                    percentRespTable[j] = new TextTable(runDirs.length, opNames.size() + 1);
                    percentRespTable[j].setHeader(0, "RunID");
                }

                for (int j = 0; j < opNames.size(); j++) {
                    opAvgThruTable.setHeader(j + 1, opNames.get(j));
                    avgRespTable.setHeader(j + 1, opNames.get(j));
                    for (int k = 0; k < rtPercentNames.size(); k++) {
                        percentRespTable[k].setHeader(j + 1, opNames.get(j));
                    }
                }

                /* CPU info is hard to compare: the hosts may not be the same
                   across runs. We print one row per RunID, each column being
                   the CPU% for one host. */
                cpuTable = new TextTable(runDirs.length, cpuList.length + 1);
                cpuTable.setHeader(0, "RunID");
                for (int j = 0; j < cpuList.length; j++) {
                    cpuTable.setHeader(j + 1, cpuList[j].get(0));    // hostname
                }
            }


            // Get avg. thruput per operation
            opAvgThruTable.setField(i, 0, runDirs[i]);
            avgRespTable.setField(i, 0, runDirs[i]);
            for (int j = 0; j < opNames.size(); j++) {
                opAvgThruTable.setField(i, j + 1, String.format("%4.3f", detail.getOpAvgThruput(j)));
                opThruList[i][j] = detail.getOpThruput(j);
                respList[i][j] = detail.getOpRT(j);
                respDistList[i][j] = detail.getOpRTDist(j);
                avgRespTable.setField(i, j + 1, rtAvgList.get(j));
                for (int k = 0; k < rtPercentNames.size(); k++) {
                    percentRespTable[k].setField(i, 0, runDirs[i]);
                    percentRespTable[k].setField(i, j + 1, rtPercentList[k].get(j));
                }
            }
            cpuTable.setField(i, 0, runDirs[i]);
            for (int j = 0; j < cpuList.length; j++) {
                cpuTable.setField(i, j + 1, cpuList[j].get(1)); // avg. util
            }
        }

        // Create the throughput table, sized with the largest number of rows.
        thruTable = new TextTable(maxThruRows, runDirs.length + 1); //1st col is time
        thruTable.setHeader(0, "Time");
        for (int i = 0; i < timeVals.size(); i++)
            thruTable.setField(i, 0, timeVals.get(i).toString());

        for (int i = 0; i < runDirs.length; i++) {
            thruTable.setHeader(i + 1, runDirs[i]);
            int j;
            for (j = 0; j < thruList[i].size(); j++) {
                thruTable.setField(j, i + 1, thruList[i].get(j).toString());
            }
            int rem = timeVals.size() - thruList[i].size();
            // Fill remaining time values with a dash, the null value for .xan
            for (; rem > 0; rem--) {
                thruTable.setField(j++, i + 1, "-");
            }
        }
        // Print out the TextTables
        p.println("Section: Run Information");
        p.println(infoTable.toString());

        p.println("Section: Overall Throughput (" + thruMetric + ")");
        p.println("Display: Line");
        p.println(thruTable.toString());

        p.println("Section: Summary Throughput Per Operation (" + thruMetric + ")");
        p.println(opAvgThruTable.toString());

        // Print per-operation throughput only if there is more than one
        // operation; otherwise it is the same as the overall throughput.
        if (opNames.size() > 1) {
            for (int k = 0; k < opNames.size(); k++) {
                p.println("Section: Detailed Throughput For  Operation '" +
                        opNames.get(k) + "' (" + thruMetric + ")");
                p.println("Display: Line");
                opThruTable = new TextTable(maxThruRows, runDirs.length + 1);
                opThruTable.setHeader(0, "Time");
                for (int i = 0; i < timeVals.size(); i++)
                    opThruTable.setField(i, 0, timeVals.get(i).toString());

                for (int i = 0; i < runDirs.length; i++) {
                    opThruTable.setHeader(i + 1, runDirs[i]);
                    int j;
                    for (j = 0; j < opThruList[i][k].size(); j++) {
                        opThruTable.setField(j, i + 1, opThruList[i][k].get(j).toString());
                    }
                    // Fill with dash for remaining time intervals (if any)
                    int rem = timeVals.size() - opThruList[i][k].size();
                    for (; rem > 0; rem--) {
                        opThruTable.setField(j++, i + 1, "-");
                    }
                }
                p.println(opThruTable.toString());
            }
        }

        p.println("Section: Average Response Times (" + respMetric + ")");
        p.println(avgRespTable.toString());

        // Print nth percentile RT info
        for (int j = 0; j < rtPercentList.length; j++) {
            p.println("Section: " + rtPercentNames.get(j) + " Percentile Response Times (" + respMetric + ")");
            p.println(percentRespTable[j].toString());
        }

        for (int k = 0; k < opNames.size(); k++) {
            p.println("Section: Average Response Times for Operation '" +
                    opNames.get(k) + "' (" + respMetric + ")");
            p.println("Display: Line");
            respTable = new TextTable(maxThruRows, runDirs.length + 1);
            respTable.setHeader(0, "Time");

            // Set time column for all rows
            for (int i = 0; i < timeVals.size(); i++)
                respTable.setField(i, 0, timeVals.get(i).toString());

            for (int i = 0; i < runDirs.length; i++) {
                respTable.setHeader(i + 1, runDirs[i]);
                int j;
                for (j = 0; j < respList[i][k].size(); j++) {
                    respTable.setField(j, i + 1, respList[i][k].get(j).toString());
                }
                int rem = timeVals.size() - respList[i][k].size();
                for (; rem > 0; rem--) {
                    respTable.setField(j++, i + 1, "-");
                }
            }
            p.println(respTable.toString());
        }

        // Print distribution of response times
        for (int k = 0; k < opNames.size(); k++) {
            p.println("Section: Distribution of Response Times for Operation '" +
                    opNames.get(k) + "' (" + respMetric + ")");
            p.println("Display: Line");
            respTable = new TextTable(maxDistRows, runDirs.length + 1);
            respTable.setHeader(0, "Time");

            // Set time column for all rows
            for (int i = 0; i < timeDistVals.size(); i++)
                respTable.setField(i, 0, timeDistVals.get(i).toString());

            for (int i = 0; i < runDirs.length; i++) {
                respTable.setHeader(i + 1, runDirs[i]);
                int j;
                for (j = 0; j < respDistList[i][k].size(); j++) {
                    respTable.setField(j, i + 1, respDistList[i][k].get(j).toString());
                }
                int rem = timeDistVals.size() - respDistList[i][k].size();
                for (; rem > 0; rem--) {
                    respTable.setField(j++, i + 1, "-");
                }
            }
            p.println(respTable.toString());
        }

        // Print CPU utilization
        p.println("Section: Average CPU Utilization");
        p.println(cpuTable.toString());
A final pair of fragments prints summary response-time percentiles, formatting a per-operation table directly to System.out:

            percentile = getValue(doc, "percentile", "nth", "95");
            System.out.println("95th %: " + percentile);
            percentile = getValue(doc, "percentile", "nth", "99");
            System.out.println("99th %: " + percentile);
        } else {
            TextTable table = new TextTable(txCount, 7);
            table.setHeader(0, "Response Times");
            table.setHeader(1, "Avg");
            table.setHeader(2, "Max");
            table.setHeader(3, "90th%");
            table.setHeader(4, "95th%");
            table.setHeader(5, "99th%");
            table.setHeader(6, "");

            for (int i = 0; i < txCount; i++) {
                Node opNode = nodeList.item(i);
                table.setField(i, 0, xPath.evaluate("@name", opNode));
                table.setField(i, 1, xPath.evaluate("avg", opNode));
                table.setField(i, 2, xPath.evaluate("max", opNode));
                table.setField(i, 3, getValue(opNode, "percentile", "nth", "90"));
                table.setField(i, 4, getValue(opNode, "percentile", "nth", "95"));
                table.setField(i, 5, getValue(opNode, "percentile", "nth", "99"));
                boolean passed = Boolean.parseBoolean(
                                    xPath.evaluate("passed", opNode));
                if (passed)
                    table.setField(i, 6, "PASSED");
                else
                    table.setField(i, 6, "FAILED");
            }
            table.format(System.out);
        }

        int users = Integer.parseInt(getValue(doc, "users"));
        double rt = Double.parseDouble(getValue(doc, "rtXtps"));
        if (users * .975 > rt)