Examples of DataSet


Examples of net.sf.flatpack.DataSet

    public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
        InputStreamReader reader = new InputStreamReader(stream);
        try {
            Parser parser = createParser(exchange, reader);
            DataSet dataSet = parser.parse();
            return new DataSetList(dataSet);
        } finally {
            reader.close();
        }
    }
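
The unmarshal method above parses the incoming stream with flatpack and wraps the parsed DataSet in a DataSetList, apparently for Apache Camel given the Exchange parameter. For readers unfamiliar with the underlying API, here is a minimal sketch of driving the flatpack parser directly, outside Camel (it assumes a comma-delimited input whose first row carries the column names; the column names "name" and "age" are purely illustrative):

    import java.io.StringReader;

    import net.sf.flatpack.DataSet;
    import net.sf.flatpack.DefaultParserFactory;
    import net.sf.flatpack.Parser;

    public class FlatpackSketch {
        public static void main(String[] args) throws Exception {
            String csv = "name,age\nAlice,30\nBob,25\n";
            // delimited parser: data source reader, delimiter, text qualifier
            Parser parser = DefaultParserFactory.getInstance()
                    .newDelimitedParser(new StringReader(csv), ',', '"');
            DataSet dataSet = parser.parse();
            while (dataSet.next()) {
                // look up columns of the current row by their header names
                System.out.println(dataSet.getString("name") + ", " + dataSet.getString("age"));
            }
        }
    }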

Examples of net.sf.javaml.core.Dataset

        if(itemClassMap.isEmpty()){
            throw new RuntimeException("Map should have at least one element!");
        }
       
        //initialize jml dataset
        Dataset jmlDataset = new DefaultDataset();
       
        //iterate through Map
        for(Map.Entry<double[],String> entry : itemClassMap.entrySet()){
           
            // initialize an Instance from the entry's key (double[] feature values) and value (String class label)
            Instance dataRow = new DenseInstance(entry.getKey(), entry.getValue());
           
            //add Instance to jml dataset
            jmlDataset.add(dataRow);
        }
       
        return jmlDataset;
    }
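
The code above builds a Java-ML Dataset from a Map of feature vectors keyed to class labels. A minimal sketch of consuming such a Dataset with a Java-ML classifier might look like the following (KNearestNeighbors with k = 1 is an illustrative choice, not part of the original code):

    import net.sf.javaml.classification.Classifier;
    import net.sf.javaml.classification.KNearestNeighbors;
    import net.sf.javaml.core.Dataset;
    import net.sf.javaml.core.DefaultDataset;
    import net.sf.javaml.core.DenseInstance;

    public class JmlClassifierSketch {
        public static void main(String[] args) {
            Dataset data = new DefaultDataset();
            data.add(new DenseInstance(new double[] {1.0, 2.0}, "A"));
            data.add(new DenseInstance(new double[] {8.0, 9.0}, "B"));

            // train a 1-nearest-neighbour classifier on the labelled data
            Classifier knn = new KNearestNeighbors(1);
            knn.buildClassifier(data);

            // classify an unlabelled instance; the predicted class value is returned
            Object predicted = knn.classify(new DenseInstance(new double[] {1.1, 2.1}));
            System.out.println(predicted);
        }
    }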

Examples of net.sf.jmp3renamer.datamanager.DataSet

      if (DoubleFinder.getProperty("use_md5").equals("true")) {
        String md5 = MD5.calculate(element);
        dataset.setMd5sum(md5);
      }
      if (DoubleFinder.getProperty("use_tag").equals("true")) {
        DataSet tag = FileManager.getInstance().getMetaData(element);
        if(tag != null) {
            dataset.setTag(tag);
        }
      }
     

Examples of net.sf.jmp3renamer.plugins.DoubleFinder.Dataset

  }

  private void findDuplicates() {
    status.setText(I18N.translate("searching"));
    for (int i = 0; i < datasets.size() && running; i++) {
      Dataset dataset = (Dataset) datasets.get(i);
     
      int percent = (int) ((double) i / (double) datasets.size() * 100);
      progress.setValue(percent);

      for (int j = 0; j < datasets.size() && running; j++) {
        Dataset dataset2 = (Dataset) datasets.get(j);
        if (i == j) {
          continue;
        }

        // find duplicates via md5
        if (DoubleFinder.getProperty("use_md5").equals("true")) {
          if (dataset.getMd5sum().equals(dataset2.getMd5sum())) {
            Duplicate d;
            if (dataset.getDuplicate() != null) {
              d = dataset.getDuplicate();
            } else if (dataset2.getDuplicate() != null) {
              d = dataset2.getDuplicate();
            } else {
              d = new Duplicate();
              d.setPercentageOfReliability(100);
              dataset2.setDuplicate(d);
              d.addDuplicate(dataset2);
            }
            dataset.setDuplicate(d);
            d.addDuplicate(dataset);
            duplicates.add(d);
          }
        }

        // find duplicates via tag
        if (DoubleFinder.getProperty("use_tag").equals("true")) {
          if (!dataset.getArtist().equals("")) {
            String artist1 = dataset.getArtist().toLowerCase();
            String artist2 = dataset2.getArtist().toLowerCase();
            int eqArtist = Utilities.percentageOfEquality(artist1, artist2);

            String title1 = dataset.getTitle().toLowerCase();
            String title2 = dataset2.getTitle().toLowerCase();
            int eqTitle = Utilities.percentageOfEquality(title1, title2);

            if (eqArtist > 80 && eqTitle > 80) {
              // soundex analysis
              int difference = -1;
              int length = 0;
              RefinedSoundex soundex = new RefinedSoundex();
              try {
                difference = soundex.difference(title1, title2);
                length = Math.max(soundex.encode(title1).length(), soundex.encode(title2).length());
              } catch (Exception e) {}

              double diff = (double) difference / (double) length;
              if (diff >= 0.75) {
                Duplicate d;
                if (dataset.getDuplicate() != null) {
                  d = dataset.getDuplicate();
                } else if (dataset2.getDuplicate() != null) {
                  d = dataset2.getDuplicate();
                } else {
                  d = new Duplicate();
                  d.setPercentageOfReliability((int) (diff * 100));
                  dataset2.setDuplicate(d);
                  d.addDuplicate(dataset2);
                }
                dataset.setDuplicate(d);
                d.addDuplicate(dataset);
                duplicates.add(d);
              }
            }
          }
        }
       
        // find duplicates via filename (Levenshtein distance)
        if (DoubleFinder.getProperty("use_filename").equals("true")) {
          String filename1 = dataset.getFilename().toLowerCase();
          String filename2 = dataset2.getFilename().toLowerCase();
          int equality = Utilities.percentageOfEquality(filename1, filename2);
          if(equality >= 80) {
            Duplicate d;
            if (dataset.getDuplicate() != null) {
              d = dataset.getDuplicate();
            } else if (dataset2.getDuplicate() != null) {
              d = dataset2.getDuplicate();
            } else {
              d = new Duplicate();
              d.setPercentageOfReliability(equality);
              dataset2.setDuplicate(d);
              d.addDuplicate(dataset2);
            }
            dataset.setDuplicate(d);
            d.addDuplicate(dataset);
            duplicates.add(d);
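
The Duplicate bookkeeping above (reuse whichever dataset already belongs to a Duplicate group, otherwise create a new one) is repeated verbatim in the md5, tag and filename checks. A hedged refactoring sketch of a private helper that could consolidate it follows; the method name linkAsDuplicates is hypothetical, while Dataset, Duplicate and the duplicates collection are the names used in the surrounding code:

  // Hypothetical helper, not part of the original class: links two datasets
  // into a shared Duplicate group with the given reliability percentage.
  private void linkAsDuplicates(Dataset dataset, Dataset dataset2, int reliability) {
    Duplicate d;
    if (dataset.getDuplicate() != null) {
      d = dataset.getDuplicate();
    } else if (dataset2.getDuplicate() != null) {
      d = dataset2.getDuplicate();
    } else {
      d = new Duplicate();
      d.setPercentageOfReliability(reliability);
      dataset2.setDuplicate(d);
      d.addDuplicate(dataset2);
    }
    dataset.setDuplicate(d);
    d.addDuplicate(dataset);
    duplicates.add(d);
  }

Each branch could then shrink to a single call such as linkAsDuplicates(dataset, dataset2, equality).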

Examples of nz.co.abrahams.asithappens.storage.DataSet

    /**
     * Loads the data sets for the session, capping the number of points
     * fetched per set at the configuration property
     * "data.points.fetched.maximum".
     */
    public void loadData() throws DBException {
        //long time;
        DataSet results;
        //DatabaseAccess db;
        DataSetsDAO dataSetsDAO;
        DataSetDAO dataSetDAO;
        int sets;
        String[] headings;
        long intervalStart;
        long intervalEnd;
        long intervalTime;
        int numValues;
        //double pointValue;
        int fetchedMaximum;
       
        dataSets.setSessionID(sessionID);
        dataSets.setStartTime(startTime);
        dataSets.setFinishTime(finishTime);
        try {
            dataSetsDAO = DAOFactory.getDataSetsDAO();
            dataSetDAO = DAOFactory.getDataSetDAO();
            dataSets.setDataType(DataType.types[dataSetsDAO.retrieveSessionDataTypeID(sessionID)]);
            dataSets.setDevice(new Device(dataSetsDAO.retrieveSessionDevice(sessionID)));
            dataSets.setPortString(dataSetsDAO.retrieveSessionPort(sessionID));
            dataSets.setPollInterval(dataSetsDAO.retrieveSessionPollInterval(sessionID));
            dataSets.setTitle(dataSetsDAO.retrieveSessionTitle(sessionID));
            dataSets.setDirection(dataSetsDAO.retrieveSessionDirection(sessionID));
            headings = DAOFactory.getDataHeadingsDAO().retrieve(sessionID);
            for ( int set = 0; set < headings.length; set++ ) {
                dataSets.addSet(headings[set]);
            }
            fetchedMaximum = Configuration.getPropertyInt("data.points.fetched.maximum");
           
            logger.debug("Fetching data for " + headings.length + " data sets ("
                    + startTime + " to " + finishTime + ")");
           
            sets = dataSets.getDataSetCount();
            taskLength = sets * fetchedMaximum;
            for ( int set = 0; set < sets; set++ ) {
                taskProgress = set * fetchedMaximum;
                //logger.debug("Current: " + current);
                numValues = dataSetsDAO.retrieveNumberOfPoints(sessionID, set, startTime, finishTime);
                if ( numValues <= fetchedMaximum ) {
                    dataSets.setDataSet(set, dataSetDAO.retrieve(sessionID, set, startTime, finishTime + 1));
                    checkCancelled();
                } else {
                    intervalTime = ( finishTime - startTime ) / fetchedMaximum;
                    for ( int interval = 0; interval < fetchedMaximum; interval++ ) {
                        intervalStart = startTime + intervalTime * interval;
                        intervalEnd = startTime + intervalTime * (interval + 1);
                        results = dataSetDAO.retrieve(sessionID, set, intervalStart, intervalEnd);
                        dataSets.getDataSet(set).add(results.aggregate(aggregation, intervalStart, intervalEnd));
                        taskProgress = set * fetchedMaximum + interval;
                        checkCancelled();
                        //logger.debug("Point " + interval + " aggregation of " + results + " : (" + results.aggregate(aggregation, intervalStart, intervalEnd) + ")");
                    }
                }
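
When a set holds more points than data.points.fetched.maximum, the loop above splits [startTime, finishTime] into fetchedMaximum equal intervals and stores one aggregated point per interval. A small standalone illustration of that interval arithmetic (plain Java, no DAO types, illustrative numbers):

    public class IntervalSketch {
        public static void main(String[] args) {
            long startTime = 1_000L;
            long finishTime = 2_000L;
            int fetchedMaximum = 4;
            // same arithmetic as the loader: equal-width intervals across the time range
            long intervalTime = (finishTime - startTime) / fetchedMaximum; // 250
            for (int interval = 0; interval < fetchedMaximum; interval++) {
                long intervalStart = startTime + intervalTime * interval;
                long intervalEnd = startTime + intervalTime * (interval + 1);
                System.out.println("interval " + interval + ": [" + intervalStart + ", " + intervalEnd + ")");
            }
        }
    }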

Examples of nz.co.abrahams.asithappens.storage.DataSet

    }

    @Before
    public void setUp() throws java.lang.Exception {
       
        emptySet = new DataSet();
       
        singleNanSet = new DataSet();
        singleNanSet.add(new DataPoint(START_TIME));
       
        multipleNanSet = new DataSet();
        multipleNanSet.add(new DataPoint(START_TIME));
        multipleNanSet.add(new DataPoint(START_TIME + TIME_INCREMENT));
        multipleNanSet.add(new DataPoint(START_TIME + 2 * TIME_INCREMENT));
       
        singlePointSet = new DataSet();
        singlePointSet.add(new DataPoint(START_TIME, INITIAL_VALUE));

        multiplePointSet = new DataSet();
        multiplePointSet.add(new DataPoint(START_TIME, INITIAL_VALUE));
        multiplePointSet.add(new DataPoint(START_TIME + TIME_INCREMENT, INITIAL_VALUE + VALUE_INCREMENT));
        multiplePointSet.add(new DataPoint(START_TIME + 2 * TIME_INCREMENT, INITIAL_VALUE + 2 * VALUE_INCREMENT));
       
    }
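
A minimal sketch of a test method that could sit alongside this setUp. It assumes JUnit 4 (matching the @Before annotation), a static import of org.junit.Assert.assertEquals, and that each add() call above appends exactly one point:

    @Test
    public void sizeReflectsAddedPoints() {
        // size() is expected to report the number of DataPoints added in setUp()
        assertEquals(0, emptySet.size());
        assertEquals(1, singleNanSet.size());
        assertEquals(3, multipleNanSet.size());
        assertEquals(1, singlePointSet.size());
        assertEquals(3, multiplePointSet.size());
    }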

Examples of nz.co.abrahams.asithappens.storage.DataSet

        if ( size() == 0 )
            fragmented = false;
       
        // Reduce data set values representing the current x pixel
        for ( int set = Math.max(startPosition.getSetNumber(), 0) ; set <= endPosition.getSetNumber()  ; set++ ) {
            DataSet points;
            Iterator iterator;
            DataPoint point;
            double summaryPoint;
            double reductionNumerator;
           
            DataSet dataSet;
            int nextIndex;
            IntegerPair range;
           
            // Find numerator for fraction of summary point set value to keep
            summaryPoint = graphContext.getSummaryData().getValue(set, xPixel - xOffset);
            if ( set == startPosition.getSetNumber() && set == endPosition.getSetNumber() ) {
                reductionNumerator = Math.max(summaryPoint - ( endPosition.getSetOffset() - startPosition.getSetOffset() ), 0);
            } else if ( set == startPosition.getSetNumber() ) {
                reductionNumerator = startPosition.getSetOffset();
            } else if ( set == endPosition.getSetNumber() ) {
                reductionNumerator = Math.max(summaryPoint - endPosition.getSetOffset(), 0);
            } else {
                reductionNumerator = 0;
            }
           
            // Reduce data points whose time falls within the single summary pixel
           
            /*
            points = graphContext.getData().getDataSet(set).pointsBetween(graphContext.getTimeFromXPixel(xPixel), graphContext.getTimeFromXPixel(xPixel + 1));
            iterator = points.iterator();
            while ( iterator.hasNext() ) {
                point = (DataPoint)(iterator.next());
                if ( summaryPoint > 0 )
                    point.setValue(point.getValue() * reductionNumerator / summaryPoint);
                else
                    point.setValue(0);
            }
             */
           
            dataSet = graphContext.getData().getDataSet(set);
            if ( startIndicies[set] == -1 )
                range = dataSet.pointIndiciesBetween(graphContext.getTimeFromXPixel(xPixel), graphContext.getTimeFromXPixel(xPixel + 1));
            else
                range = dataSet.pointIndiciesBetween(graphContext.getTimeFromXPixel(xPixel), graphContext.getTimeFromXPixel(xPixel + 1), startIndicies[set]);
            nextIndex = range.first;
            while ( nextIndex <= range.second ) {
                point = dataSet.elementAt(nextIndex);
                if ( summaryPoint > 0 )
                    point.setValue(point.getValue() * reductionNumerator / summaryPoint);
                else
                    point.setValue(0);
                nextIndex++;

Examples of nz.co.abrahams.asithappens.storage.DataSet

        FileWriter out;
        DataSetsDAO dataSetsDAO;
        DataSetDAO dataSetDAO;
        int sessionID;
        String[] headings;
        DataSet dataSet;
        DataPoint point;
       
        savedFile = null;
        dialog = new JOptionPane();
       
        sessionID = getSessionID();
        if ( sessionID == -1 )
            return;
       
        if ( ((JTable)(sessionsPane.getViewport().getView())).getSelectedRowCount() > 1 ) {
            dialog.showMessageDialog(this, "Please select a single session for data export", "Multiple sessions selected", JOptionPane.ERROR_MESSAGE);
            return;
        }
       
        try {
            //.testConnection();
            dataSetsDAO = DAOFactory.getDataSetsDAO();
            dataSetDAO = DAOFactory.getDataSetDAO();
            chooser = new JFileChooser();
            returnStatus = chooser.showSaveDialog(this);
            if ( returnStatus == JFileChooser.APPROVE_OPTION ) {
                savedFile = chooser.getSelectedFile();
                if ( ! savedFile.exists() || ( savedFile.exists() && dialog.showConfirmDialog(null,
                        "File already exists - are you sure you want to overwrite it?",
                        "Confirm file overwrite", JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION ) ) {
                    out = new FileWriter(savedFile);
                   
                    out.write("Title: " + dataSetsDAO.retrieveSessionTitle(sessionID) + "\n");
                    out.write("Description: " + DataSets.retrieveDescription(sessionID) + "\n\n");
                   
                    headings = DAOFactory.getDataHeadingsDAO().retrieve(sessionID);
                   
                    for ( int set = 0 ; set < headings.length ; set++ ) {
                        out.write("Set " + set + ": " + headings[set] + "\n");
                    }
                    out.write("\n");
                   
                    out.write("Set,Time,Value\n");
                    for ( int set = 0; set < headings.length ; set++ ) {
                        dataSet = dataSetDAO.retrieve(sessionID, set);
                        //out.write("Set " + set + ": " + headings[set] + "\n");
                        for ( int i = 0 ; i < dataSet.size() ; i++ ) {
                            point = dataSet.elementAt(i);
                            out.write(set + "," + point.getTime() + "," + point.getValue() + "\n");
                        }
                        //out.write("\n");
                    }
                   
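
For reference, the exported file written above takes roughly this shape (all names and numbers are illustrative; real headings and points come from the DAOs):

    Title: Router uplink traffic
    Description: Example capture session

    Set 0: inbound
    Set 1: outbound

    Set,Time,Value
    0,1199145600000,42.0
    1,1199145600000,17.5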

Examples of org.apache.clerezza.rdf.core.sparql.query.DataSet

  private void appendVariable(StringBuffer s, Variable v) {
    s.append("?").append(v.getName());
  }

  private void appendDataSet(StringBuffer s, SimpleQuery q) {
    DataSet dataSet = q.getDataSet();
    if (dataSet != null) {
      for (UriRef dg : dataSet.getDefaultGraphs()) {
        s.append("FROM ").append(dg.toString()).append("\n");
      }
      for (UriRef ng : dataSet.getNamedGraphs()) {
        s.append("FROM NAMED ").append(ng.toString()).append("\n");
      }
    }
  }

Examples of org.apache.jmeter.report.DataSet

    public double[][] convertToDouble(List data) {
        String[] urls = this.getURLs().split(URL_DELIM);
        double[][] dataset = new double[urls.length][data.size()];
        for (int idx=0; idx < urls.length; idx++) {
            for (int idz=0; idz < data.size(); idz++) {
                DataSet dset = (DataSet)data.get(idz);
                SamplingStatCalculator ss = dset.getStatistics(urls[idx]);
                dataset[idx][idz] = getValue(ss);
            }
        }
        return dataset;
    }
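
The method above produces a matrix indexed [url][sample]: one row per URL returned by getURLs(), one column per DataSet in the input list, each cell holding one statistic extracted via getValue(ss). A tiny standalone illustration of that layout (plain arrays, illustrative values, no JMeter types):

    public class MatrixLayoutSketch {
        public static void main(String[] args) {
            String[] urls = {"/home", "/login"};   // rows: one per URL
            int samples = 3;                       // columns: one per DataSet in the list
            double[][] dataset = new double[urls.length][samples];
            dataset[0][2] = 42.0; // third sample recorded for "/home"
            System.out.println(dataset.length + " rows x " + dataset[0].length + " columns");
        }
    }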