Package de.lmu.ifi.dbs.elki.database.ids

Examples of de.lmu.ifi.dbs.elki.database.ids.HashSetModifiableDBIDs


   * @param extraxt a list of lists of DistanceResultPair to extract
   * @param remove the ids to remove
   * @return the DBIDs in the given collection
   */
  protected ArrayDBIDs extractAndRemoveIDs(List<? extends Collection<DistanceResultPair<D>>> extraxt, ArrayDBIDs remove) {
    HashSetModifiableDBIDs ids = DBIDUtil.newHashSet();
    for(Collection<DistanceResultPair<D>> drps : extraxt) {
      for(DistanceResultPair<D> drp : drps) {
        ids.add(drp.getDBID());
      }
    }
    for(DBID id : remove) {
      ids.remove(id);
    }
    // Convert back to array
    return DBIDUtil.newArray(ids);
  }
View Full Code Here


      logger.verbose("Greedy ensemble Gain to naive: " + gain(greedyauc, naiveauc, 1) + " cost gain: " + gain(greedycost, naivecost, 0));
    }
    {
      MeanVariance meanauc = new MeanVariance();
      MeanVariance meancost = new MeanVariance();
      HashSetModifiableDBIDs candidates = DBIDUtil.newHashSet(relation.getDBIDs());
      candidates.remove(firstid);
      for(int i = 0; i < 5000; i++) {
        // Build the improved ensemble:
        final double[] randomensemble = new double[dim];
        {
          DBIDs random = DBIDUtil.randomSample(candidates, ensemble.size(), i);
View Full Code Here

  /**
   * Test ROC curve generation, including curve simplification
   */
  @Test
  public void testROCCurve() {
    HashSetModifiableDBIDs positive = DBIDUtil.newHashSet();
    positive.add(DBIDUtil.importInteger(1));
    positive.add(DBIDUtil.importInteger(2));
    positive.add(DBIDUtil.importInteger(3));
    positive.add(DBIDUtil.importInteger(4));
    positive.add(DBIDUtil.importInteger(5));

    ArrayList<Pair<Double, DBID>> distances = new ArrayList<Pair<Double, DBID>>();
    distances.add(new Pair<Double, DBID>(0.0, DBIDUtil.importInteger(1)));
    distances.add(new Pair<Double, DBID>(1.0, DBIDUtil.importInteger(2)));
    distances.add(new Pair<Double, DBID>(2.0, DBIDUtil.importInteger(6)));
View Full Code Here

      resultIntervals.add(i1);
    }
    resultIntervals.add(this.intervals.last());
    resultIntervals.add(other.intervals.last());

    HashSetModifiableDBIDs resultIDs = DBIDUtil.newHashSet(this.ids);
    resultIDs.retainAll(other.ids);

    if(resultIDs.size() / all >= tau) {
      return new CLIQUEUnit<V>(resultIntervals, resultIDs);
    }

    return null;
  }
View Full Code Here

   *
   * @param subspace
   * @return ids
   */
  protected DBIDs computeSubspace(Vector<IntIntPair> subspace, ArrayList<ArrayList<DBIDs>> ranges) {
    HashSetModifiableDBIDs ids = DBIDUtil.newHashSet(ranges.get(subspace.get(0).first - 1).get(subspace.get(0).second));
    // intersect all selected dimensions
    for(int i = 1; i < subspace.size(); i++) {
      DBIDs current = ranges.get(subspace.get(i).first - 1).get(subspace.get(i).second);
      ids.retainAll(current);
      if(ids.size() == 0) {
        break;
      }
    }
    return ids;
  }
View Full Code Here

   * @param gene gene data
   * @param ranges Database ranges
   * @return resulting DBIDs
   */
  protected DBIDs computeSubspaceForGene(int[] gene, ArrayList<ArrayList<DBIDs>> ranges) {
    HashSetModifiableDBIDs m = DBIDUtil.newHashSet(ranges.get(0).get(gene[0]));
    // intersect
    for(int i = 1; i < gene.length; i++) {
      if(gene[i] != DONT_CARE) {
        DBIDs current = ranges.get(i).get(gene[i]);
        m.retainAll(current);
      }
    }
    return m;
  }
View Full Code Here

      final WritableDataStore<DBIDs> store = DataStoreUtil.makeStorage(database.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC | DataStoreFactory.HINT_TEMP, DBIDs.class);

      // Expand multiple steps
      FiniteProgress progress = logger.isVerbose() ? new FiniteProgress("Expanding neighborhoods", database.size(), logger) : null;
      for(final DBID id : database.iterDBIDs()) {
        HashSetModifiableDBIDs res = DBIDUtil.newHashSet(id);
        DBIDs todo = id;
        for(int i = 0; i < steps; i++) {
          ModifiableDBIDs ntodo = DBIDUtil.newHashSet();
          for(final DBID oid : todo) {
            DBIDs add = innerinst.getNeighborDBIDs(oid);
            if(add != null) {
              for(DBID nid : add) {
                if(res.contains(nid)) {
                  continue;
                }
                ntodo.add(nid);
                res.add(nid);
              }
            }
          }
          if(ntodo.size() == 0) {
            continue;
View Full Code Here

    // visualization
    List<SteepArea> salist = new ArrayList<SteepArea>();
    List<SteepDownArea> sdaset = new java.util.Vector<SteepDownArea>();
    ModifiableHierarchy<Cluster<OPTICSModel>> hier = new HierarchyHashmapList<Cluster<OPTICSModel>>();
    HashSet<Cluster<OPTICSModel>> curclusters = new HashSet<Cluster<OPTICSModel>>();
    HashSetModifiableDBIDs unclaimedids = DBIDUtil.newHashSet(relation.getDBIDs());

    SteepScanPosition<N> scan = new SteepScanPosition<N>(clusterOrder);
    while(scan.hasNext()) {
      final int curpos = scan.index;
      // Update maximum-inbetween
      mib = Math.max(mib, scan.ecurr.getReachability().doubleValue());
      // The last point cannot be the start of a steep area.
      if(scan.esucc != null) {
        // Xi-steep down area
        if(scan.steepDown(ixi)) {
          // Update mib values with current mib and filter
          updateFilterSDASet(mib, sdaset, ixi);
          final double startval = scan.ecurr.getReachability().doubleValue();
          int startsteep = scan.index;
          int endsteep = Math.min(scan.index + 1, clusterOrder.size());
          {
            while(scan.hasNext()) {
              scan.next();
              // not going downward at all - stop here.
              if(!scan.steepDown(1.0)) {
                break;
              }
              // still steep - continue.
              if(scan.steepDown(ixi)) {
                endsteep = Math.min(scan.index + 1, clusterOrder.size());
              }
              else {
                // Stop looking after minpts "flat" steps.
                if(scan.index - endsteep > minpts) {
                  break;
                }
              }
            }
          }
          mib = clusterOrder.get(endsteep).getReachability().doubleValue();
          final SteepDownArea sda = new SteepDownArea(startsteep, endsteep, startval, 0);
          if(logger.isDebuggingFinest()) {
            logger.debugFinest("Xi " + sda.toString());
          }
          sdaset.add(sda);
          if(salist != null) {
            salist.add(sda);
          }
          continue;
        }
        else
        // Xi-steep up area
        if(scan.steepUp(ixi)) {
          // Update mib values with current mib and filter
          updateFilterSDASet(mib, sdaset, ixi);
          final SteepUpArea sua;
          // Compute steep-up area
          {
            int startsteep = scan.index;
            int endsteep = scan.index + 1;
            mib = scan.ecurr.getReachability().doubleValue();
            double esuccr = scan.esucc.getReachability().doubleValue();
            // There is nothing higher than infinity
            if(!Double.isInfinite(esuccr)) {
              // find end of steep-up-area, eventually updating mib again
              while(scan.hasNext()) {
                scan.next();
                // not going upward - stop here.
                if(!scan.steepUp(1.0)) {
                  break;
                }
                // still steep - continue.
                if(scan.steepUp(ixi)) {
                  endsteep = Math.min(scan.index + 1, clusterOrder.size() - 1);
                  mib = scan.ecurr.getReachability().doubleValue();
                  esuccr = scan.esucc.getReachability().doubleValue();
                }
                else {
                  // Stop looking after minpts non-up steps.
                  if(scan.index - endsteep > minpts) {
                    break;
                  }
                }
              }
            }
            sua = new SteepUpArea(startsteep, endsteep, esuccr);
            if(logger.isDebuggingFinest()) {
              logger.debugFinest("Xi " + sua.toString());
            }
            if(salist != null) {
              salist.add(sua);
            }
          }
          // Validate and computer clusters
          // logger.debug("SDA size:"+sdaset.size()+" "+sdaset);
          ListIterator<SteepDownArea> sdaiter = sdaset.listIterator(sdaset.size());
          // Iterate backwards for correct hierarchy generation.
          while(sdaiter.hasPrevious()) {
            SteepDownArea sda = sdaiter.previous();
            // logger.debug("Comparing: eU="+mib.doubleValue()+" SDA: "+sda.toString());
            // Condition 3b: end-of-steep-up > maximum-in-between lower
            if(mib * ixi < sda.getMib()) {
              continue;
            }
            // By default, clusters cover both the steep up and steep down area
            int cstart = sda.getStartIndex();
            int cend = sua.getEndIndex();
            // However, we sometimes have to adjust this (Condition 4):
            {
              // Case b)
              if(sda.getMaximum() * ixi >= sua.getMaximum()) {
                while(cstart < sda.getEndIndex()) {
                  if(clusterOrder.get(cstart + 1).getReachability().doubleValue() > sua.getMaximum()) {
                    cstart++;
                  }
                  else {
                    break;
                  }
                }
              }
              // Case c)
              else if(sua.getMaximum() * ixi >= sda.getMaximum()) {
                while(cend > sua.getStartIndex()) {
                  if(clusterOrder.get(cend - 1).getReachability().doubleValue() > sda.getMaximum()) {
                    cend--;
                  }
                  else {
                    break;
                  }
                }
              }
              // Case a) is the default
            }
            // Condition 3a: obey minpts
            if(cend - cstart + 1 < minpts) {
              continue;
            }
            // Build the cluster
            ModifiableDBIDs dbids = DBIDUtil.newArray();
            for(int idx = cstart; idx <= cend; idx++) {
              final DBID dbid = clusterOrder.get(idx).getID();
              // Collect only unclaimed IDs.
              if(unclaimedids.remove(dbid)) {
                dbids.add(dbid);
              }
            }
            if(logger.isDebuggingFine()) {
              logger.debugFine("Found cluster with " + dbids.size() + " new objects, length " + (cstart - cend + 1));
            }
            OPTICSModel model = new OPTICSModel(cstart, cend);
            Cluster<OPTICSModel> cluster = new Cluster<OPTICSModel>("Cluster_" + cstart + "_" + cend, dbids, model, hier);
            // Build the hierarchy
            {
              Iterator<Cluster<OPTICSModel>> iter = curclusters.iterator();
              while(iter.hasNext()) {
                Cluster<OPTICSModel> clus = iter.next();
                OPTICSModel omodel = clus.getModel();
                if(model.getStartIndex() <= omodel.getStartIndex() && omodel.getEndIndex() <= model.getEndIndex()) {
                  hier.add(cluster, clus);
                  iter.remove();
                }
              }
            }
            curclusters.add(cluster);
          }
        }
      }
      // Make sure to advance at least one step
      if(curpos == scan.index) {
        scan.next();
      }
    }
    if(curclusters.size() > 0 || unclaimedids.size() > 0) {
      final Clustering<OPTICSModel> clustering = new Clustering<OPTICSModel>("OPTICS Xi-Clusters", "optics");
      if(unclaimedids.size() > 0) {
        final Cluster<OPTICSModel> allcluster;
        if(clusterOrder.get(clusterOrder.size() - 1).getReachability().isInfiniteDistance()) {
          allcluster = new Cluster<OPTICSModel>("Noise", unclaimedids, true, new OPTICSModel(0, clusterOrder.size() - 1), hier);
        }
        else {
View Full Code Here

TOP

Related Classes of de.lmu.ifi.dbs.elki.database.ids.HashSetModifiableDBIDs

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.