Package: net.sf.mzmine.modules.deconvolutedanalysis

Examples of net.sf.mzmine.modules.deconvolutedanalysis.CorrectedSpectrum


      // Check that each spectrum is retention corrected
      List<Integer> retentionIndices = new ArrayList<Integer>();

      // NOTE(review): assumes every MS level-1 scan in dataFile is a
      // CorrectedSpectrum — the cast below would throw otherwise.
      for (int scanNumber : dataFile.getScanNumbers(1)) {
        CorrectedSpectrum s = (CorrectedSpectrum) dataFile
            .getScan(scanNumber);
        assert s.isRetentionCorrected();
        assert (s.getRetentionCorrection() != null);

        // Capture the correction results (each iteration overwrites, so
        // the last scan wins) and collect per-scan retention indices for
        // the ordering check below.
        results = s.getRetentionCorrectionResults();
        retentionIndices.add(s.getRetentionIndex());
      }

      // Check that the retention indices are non-decreasing
      // (Guava's Ordering.isOrdered permits equal neighbors).
      assert Ordering.natural().isOrdered(retentionIndices);
View Full Code Here


        .getElementName())) {

      // Rebuild the stored scan from the parsed project-file fields:
      // a CorrectedSpectrum when the element flagged this scan as
      // retention-corrected, otherwise a plain StorableScan.
      StorableScan scan = null;

      if (isCorrectedSpectrum) {
        // retentionIndex and uniqueMass are narrowed to int here —
        // presumably a sentinel (e.g. -1) encodes "absent"; TODO confirm
        // against the writer side.
        scan = new CorrectedSpectrum(newRawDataFile, currentStorageID,
            scanNumber, originalRetentionTime,
            (int) retentionIndex, (int) uniqueMass,
            dataPointsNumber, centroided);
      } else {
        scan = new StorableScan(newRawDataFile, currentStorageID,
View Full Code Here

        Scan spectrum = dataFile.getScan(scanNumber);

        // Add scan to new data file: persist the original data points,
        // then wrap the scan as a CorrectedSpectrum referencing the new
        // storage ID (the spectrum content itself is unchanged here).
        int storageID = rawDataFileWriter.storeDataPoints(spectrum
            .getDataPoints());
        CorrectedSpectrum newSpectrum = new CorrectedSpectrum(spectrum,
            rawDataFileWriter, spectrum.getNumberOfDataPoints(),
            storageID);

        rawDataFileWriter.addScan(newSpectrum);
View Full Code Here

    // Fit retention index as a function of retention time from the FAME
    // marker calibration points (combined linear/polynomial regression;
    // the meaning of the constructor argument 5 is defined by
    // CombinedRegression — TODO confirm it is the polynomial order).
    CombinedRegression fit = new CombinedRegression(5);
    fit.setData(Doubles.toArray(fameTimes), Doubles.toArray(fameIndices));

    // Add calculated retention index to each mass spectrum (MS level 1);
    // the predicted index is truncated to int, and the fit plus the raw
    // correction results are attached to every scan.
    for (int scanNumber : correctedDataFile.getScanNumbers(1)) {
      CorrectedSpectrum s = (CorrectedSpectrum) correctedDataFile
          .getScan(scanNumber);
      s.setRetentionIndex((int) fit.getY(s.getRetentionTime()));
      s.setRetentionCorrection(fit, results);
    }
  }
View Full Code Here

   */
  private void processPCI() {
    // Produce a list of candidate spectra for each individual FAME marker
    // (one inner list per marker, indexed in step with FameData arrays).
    List<List<CorrectedSpectrum>> candidates = new ArrayList<List<CorrectedSpectrum>>();

    // "best*" holds an unambiguous anchor match; "highest*" holds the
    // fallback: the single most intense base peak seen over all markers.
    CorrectedSpectrum bestMatch = null, highestMatch = null;
    double bestIntensity = 0, highestIntensity = 0;
    int bestLibraryMatch = -1, highestLibraryMatch = -1;

    for (int i = 0; i < FameData.N_FAMES; i++) {
      // [M + H]+ ion mass for this FAME marker (PCI adds one proton).
      int mass = FameData.FAME_MASSES[i] + 1;
      String name = FameData.FAME_NAMES[i];

      // Search for [M + H]+ ion for each FAME marker
      List<CorrectedSpectrum> matches = new ArrayList<CorrectedSpectrum>();
      double maxBasePeakIntensity = 0;

      for (CorrectedSpectrum spectrum : spectra) {
        // Canceled?
        if (isCanceled())
          return;

        DataPoint basePeak = spectrum.getBasePeak();

        // Integer-truncated m/z comparison: any base peak whose m/z lies
        // in [mass, mass + 1) is treated as a hit for this marker.
        if (basePeak != null && (int) basePeak.getMZ() == mass) {
          matches.add(spectrum);

          // Compute maximum base peak intensity of these FAME markers
          if (basePeak.getIntensity() > maxBasePeakIntensity)
            maxBasePeakIntensity = basePeak.getIntensity();

          // Find highest intensity FAME marker over ALL markers
          // (used only as a fallback anchor, below).
          if (basePeak.getIntensity() > highestIntensity) {
            highestMatch = spectrum;
            highestIntensity = basePeak.getIntensity();
            highestLibraryMatch = i;
          }
        }
      }

      // Find initial standard match: a candidate is "strong" when its
      // base peak exceeds half the strongest base peak for this marker.
      CorrectedSpectrum bestCandidate = null;
      int count = 0;

      for (CorrectedSpectrum s : matches) {
        if (s.getBasePeak().getIntensity() > 0.5 * maxBasePeakIntensity) {
          bestCandidate = s;
          count++;
        }
      }

      // Accept the candidate as anchor only when it is unambiguous
      // (exactly one strong match) and the marker is not one of the first
      // two or last two in the library; the first such anchor is kept.
      if (count == 1 && bestCandidate != null && i > 1
          && i < FameData.N_FAMES - 2) {
        if (bestMatch == null) {
          bestMatch = bestCandidate;
          bestIntensity = bestCandidate.getBasePeak().getIntensity();
          bestLibraryMatch = i;

          logger.info("Best Match: " + name + " "
              + bestMatch.getScanNumber() + " "
              + bestMatch.getRetentionTime() + " "
              + bestIntensity);
        }
      }

      candidates.add(matches);
    }

    // No unambiguous anchor found: fall back to the overall
    // highest-intensity match recorded above.
    if (bestMatch == null) {
      bestMatch = highestMatch;
      bestIntensity = highestIntensity;
      bestLibraryMatch = highestLibraryMatch;
    }

    // Return an error if no initial match is found
    if (bestMatch == null) {
      MZmineCore.getDesktop().displayErrorMessage(
          "Unable to find initial standard match in "
              + dataFile.getName());
      setStatus(TaskStatus.ERROR);
      cancel();
      return;
    }

    // Parallel lists: accepted marker retention times, their library
    // retention indices, and the corresponding marker names.
    List<Double> fameTimes = new ArrayList<Double>();
    List<Double> fameIndices = new ArrayList<Double>();
    List<String> fameNames = new ArrayList<String>();

    // Search for each FAME marker individually
    for (int i = 0; i < FameData.N_FAMES; i++) {
      List<CorrectedSpectrum> matches = candidates.get(i);

      if (matches.size() > 0) {
        // Expected retention time of marker i, predicted from the anchor
        // via the library retention-time spacing between the two markers.
        double shift = FameData.FAME_RETENTION_TIMES[bestLibraryMatch]
            - FameData.FAME_RETENTION_TIMES[i];
        double expectedRt = bestMatch.getRetentionTime() - shift;

        for (Iterator<CorrectedSpectrum> it = matches.iterator(); it
            .hasNext();) {
          CorrectedSpectrum s = it.next();

          // Filter those peaks outside of expected range
          if (Math.abs(s.getRetentionTime() - expectedRt) > timeWindow)
            it.remove();
        }

        // Of the surviving candidates keep the most intense one.
        // NOTE: highestMatch/highestIntensity are intentionally reused
        // here as per-marker scratch variables.
        highestMatch = null;
        highestIntensity = 0;

        for (CorrectedSpectrum s : matches) {
          if (s.getBasePeak().getIntensity() > highestIntensity) {
            highestMatch = s;
            highestIntensity = s.getBasePeak().getIntensity();
          }
        }

        if (highestMatch != null) {
          fameTimes.add(highestMatch.getRetentionTime());
          fameIndices
              .add((double) FameData.FAME_RETENTION_INDICES[i]);
          fameNames.add(FameData.FAME_NAMES[i]);
        }
      }
    }

    // Store retention correction results, keyed by marker name
    results = new TreeMap<String, FameCorrection>();

    for (int i = 0; i < fameTimes.size(); i++)
      results.put(fameNames.get(i), new FameCorrection(correctedDataFile,
          fameTimes.get(i), (int) fameIndices.get(i).doubleValue()));

    // Log results
    logger.info(dataFile + "");
    logger.info(fameTimes + "");
    logger.info(fameNames + "");

    // Apply linear/polynomial fit of retention index vs. retention time
    CombinedRegression fit = new CombinedRegression(5);
    fit.setData(Doubles.toArray(fameTimes), Doubles.toArray(fameIndices));

    // Add calculated retention index to each mass spectrum (MS level 1)
    for (int scanNumber : correctedDataFile.getScanNumbers(1)) {
      CorrectedSpectrum s = (CorrectedSpectrum) correctedDataFile
          .getScan(scanNumber);
      s.setRetentionIndex((int) fit.getY(s.getRetentionTime()));
      s.setRetentionCorrection(fit, results);
    }
  }
View Full Code Here

        new char[]{scan instanceof CorrectedSpectrum ? '1' : '0'}, 0, 1);
    hd.endElement("", "",
        RawDataElementName.CORRECTED_SPECTRUM.getElementName());

    // When the scan is a CorrectedSpectrum, serialize its extra fields
    // as child XML elements via the SAX ContentHandler.
    if (scan instanceof CorrectedSpectrum) {
      CorrectedSpectrum s = (CorrectedSpectrum) scan;

      // Original (pre-correction) retention time
      hd.startElement(
          "",
          "",
          RawDataElementName.ORIGINAL_RETENTION_TIME.getElementName(),
          atts);
      hd.characters(String.valueOf(s.getOriginalRetentionTime())
          .toCharArray(), 0,
          String.valueOf(s.getOriginalRetentionTime()).length());
      hd.endElement("", "",
          RawDataElementName.ORIGINAL_RETENTION_TIME.getElementName());

      // Calculated retention index
      hd.startElement("", "",
          RawDataElementName.RETENTION_INDEX.getElementName(), atts);
      hd.characters(String.valueOf(s.getRetentionIndex()).toCharArray(),
          0, String.valueOf(s.getRetentionIndex()).length());
      hd.endElement("", "",
          RawDataElementName.RETENTION_INDEX.getElementName());

      // Unique mass m/z; the sentinel "-1" is written when absent
      // (must match the "-1 means absent" convention on the reader side).
      hd.startElement("", "",
          RawDataElementName.UNIQUE_MASS.getElementName(), atts);
      if (s.getUniqueMass() == null)
        hd.characters(new char[]{'-', '1'}, 0, 2);
      else
        hd.characters(String.valueOf(s.getUniqueMass().getMZ())
            .toCharArray(), 0,
            String.valueOf(s.getUniqueMass().getMZ()).length());
      hd.endElement("", "",
          RawDataElementName.UNIQUE_MASS.getElementName());

      // Stash the correction data for serialization elsewhere
      fit = s.getRetentionCorrection();
      correctionResults = s.getRetentionCorrectionResults();
    }
  }
View Full Code Here

          continue;

        // Exclude the entire spectrum if its unique mass intensity is
        // less than the given threshold
        if (spectrum instanceof CorrectedSpectrum) {
          CorrectedSpectrum s = ((CorrectedSpectrum) spectrum);

          if (s.getUniqueMass() != null
              && s.getUniqueMass().getIntensity() < uniqueMassThreshold)
            continue;
        }

        // Get the data points from the spectrum and sort by m/z
        // (ascending; pre-Java-8 style comparator)
        List<DataPoint> dataPoints = Lists.newArrayList(spectrum
            .getDataPoints());
        Collections.sort(dataPoints, new Comparator<DataPoint>() {
          @Override
          public int compare(DataPoint a, DataPoint b) {
            return a.getMZ() < b.getMZ() ? -1 : a.getMZ() > b
                .getMZ() ? 1 : 0;
          }
        });

        // Create a list for the filtered points
        List<DataPoint> filteredDataPoints = new ArrayList<DataPoint>();

        // Filter the data points given pre-defined conditions.
        // Iterating from high m/z to low so each point can be compared
        // against its immediate lower-m/z neighbor.
        for (int i = dataPoints.size() - 1; i >= 0; i--) {
          // Step #1: Remove C13 isotope ions — drop point i when the
          // neighbor ~1 m/z below is sufficiently more intense
          // (ratio >= 1 + c13IsotopeCut), i.e. point i looks like the
          // +1 isotope peak of that neighbor.
          if (i > 0
              && dataPoints.get(i).getMZ()
                  - dataPoints.get(i - 1).getMZ() < 1 + EPSILON
              && dataPoints.get(i - 1).getIntensity() >= (1 + c13IsotopeCut)
                  * dataPoints.get(i).getIntensity())
            continue;

          // Step #2: Remove all ions below the absolute intensity
          // threshold (e.g. < 100 counts)
          else if (dataPoints.get(i).getIntensity() < intensityThreshold)
            continue;

          // Step #3: Remove all ions below the given percentage of the
          // base peak intensity (e.g. < 1%)
          else if (dataPoints.get(i).getIntensity() < intensityPercentageThreshold
              * spectrum.getBasePeak().getIntensity())
            continue;

          // If the data point passes all filters, keep it; inserting at
          // index 0 restores ascending m/z order.
          else
            filteredDataPoints.add(0, dataPoints.get(i));
        }

        // Add scan to new data file with only the surviving data points
        int storageID = rawDataFileWriter
            .storeDataPoints(filteredDataPoints
                .toArray(new DataPoint[filteredDataPoints
                    .size()]));
        CorrectedSpectrum newSpectrum = new CorrectedSpectrum(spectrum,
            rawDataFileWriter, filteredDataPoints.size(), storageID);
        rawDataFileWriter.addScan(newSpectrum);

        processedScans++;
      }
View Full Code Here

TOP

Related Classes of net.sf.mzmine.modules.deconvolutedanalysis.CorrectedSpectrum

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.