Package de.lmu.ifi.dbs.elki.database.ids

Usage examples of de.lmu.ifi.dbs.elki.database.ids.ModifiableDBIDs, the interface for modifiable DBID collections in the ELKI framework
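
The fragments collected below exercise a small core of the ModifiableDBIDs API: creating a hash- or array-backed collection through DBIDUtil, adding and removing individual DBIDs, membership tests with contains(), and plain iteration. The following sketch condenses exactly those calls into one place. It is illustrative only; it assumes the ELKI 0.4-era API the fragments were written against, and the two helper methods are hypothetical.

// Illustrative sketch only; restricted to calls that appear in the fragments below
// (DBIDUtil.newHashSet, DBIDUtil.newArray, add, remove, contains, iteration).
import de.lmu.ifi.dbs.elki.database.ids.DBID;
import de.lmu.ifi.dbs.elki.database.ids.DBIDs;
import de.lmu.ifi.dbs.elki.database.ids.DBIDUtil;
import de.lmu.ifi.dbs.elki.database.ids.ModifiableDBIDs;

public final class ModifiableDBIDsSketch {
  /** Copy all IDs into a hash-backed set, then drop the excluded ones. */
  public static ModifiableDBIDs copyWithout(DBIDs all, DBIDs exclude) {
    ModifiableDBIDs result = DBIDUtil.newHashSet(all); // hash backing: fast contains()/remove()
    for(DBID id : exclude) {
      result.remove(id);
    }
    return result;
  }

  /** Collect IDs into an array-backed list, preserving insertion order. */
  public static ModifiableDBIDs collect(Iterable<DBID> source) {
    ModifiableDBIDs result = DBIDUtil.newArray(); // array backing: cheap append, stable order
    for(DBID id : source) {
      result.add(id);
    }
    return result;
  }
}

A set-backed collection (DBIDUtil.newHashSet) is the natural choice when contains() or remove() is on the hot path, as in the noise and seen sets below; an array-backed one (DBIDUtil.newArray) is enough when IDs are only appended and iterated, as in the candidate lists.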


          minDist = new Pair<DoubleDistance, Integer>(currentDist, i);
        }
      }
      // add p to cluster with mindist
      assert minDist != null;
      ModifiableDBIDs ids = clusterIDs.get(minDist.second);
      ids.add(p_id);
    }

    List<PROCLUSCluster> clusters = new ArrayList<PROCLUSCluster>();
    for(int i = 0; i < dimensions.size(); i++) {
      ModifiableDBIDs objectIDs = clusterIDs.get(i);
      if(!objectIDs.isEmpty()) {
        Set<Integer> clusterDimensions = dimensions.get(i).second;
        V centroid = DatabaseUtil.centroid(database, objectIDs);
        clusters.add(new PROCLUSCluster(objectIDs, clusterDimensions, centroid));
      }
    }
View Full Code Here


   * @param clusters the clusters
   * @param threshold the threshold
   * @return the bad medoids
   */
  private ModifiableDBIDs computeBadMedoids(Map<DBID, PROCLUSCluster> clusters, int threshold) {
    ModifiableDBIDs badMedoids = DBIDUtil.newHashSet();
    for(DBID m_i : clusters.keySet()) {
      PROCLUSCluster c_i = clusters.get(m_i);
      if(c_i.objectIDs.size() < threshold) {
        badMedoids.add(m_i);
      }
    }
    return badMedoids;
  }
View Full Code Here
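
The two PROCLUS fragments above follow a common pattern: keep one ModifiableDBIDs per cluster, append every object to the collection of its closest medoid, and afterwards flag medoids whose collections stayed below a size threshold as bad. A stand-alone version of the assignment step might look as follows; distanceTo() and the medoid list are hypothetical placeholders, and the Map/List/HashMap types come from java.util.

// Sketch of the assignment step; distanceTo() is a hypothetical distance lookup.
Map<DBID, ModifiableDBIDs> partition(DBIDs points, List<DBID> medoids) {
  Map<DBID, ModifiableDBIDs> clusterIDs = new HashMap<DBID, ModifiableDBIDs>();
  for(DBID m : medoids) {
    clusterIDs.put(m, DBIDUtil.newHashSet()); // one modifiable collection per medoid
  }
  for(DBID p : points) {
    DBID best = null;
    double bestDist = Double.POSITIVE_INFINITY;
    for(DBID m : medoids) {
      double d = distanceTo(p, m);
      if(d < bestDist) {
        bestDist = d;
        best = m;
      }
    }
    clusterIDs.get(best).add(p); // p joins the cluster of its closest medoid
  }
  return clusterIDs;
}

Empty collections can then be skipped (objectIDs.isEmpty() in the first fragment), and undersized ones reported as computeBadMedoids does above.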

    final int dim = DatabaseUtil.dimensionality(relation);

    // init heap
    Heap<IntegerPriorityObject<CASHInterval>> heap = new Heap<IntegerPriorityObject<CASHInterval>>();
    ModifiableDBIDs noiseIDs = DBIDUtil.newHashSet(relation.getDBIDs());
    initHeap(heap, relation, dim, noiseIDs);

    if(logger.isDebugging()) {
      StringBuffer msg = new StringBuffer();
      msg.append("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX");
      msg.append("\nXXXX dim ").append(dim);
      msg.append("\nXXXX database.size ").append(relation.size());
      msg.append("\nXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX");
      logger.debugFine(msg.toString());
    }
    else if(logger.isVerbose()) {
      StringBuffer msg = new StringBuffer();
      msg.append("XXXX dim ").append(dim).append(" database.size ").append(relation.size());
      logger.verbose(msg.toString());
    }

    // get the ''best'' d-dimensional intervals at max level
    while(!heap.isEmpty()) {
      CASHInterval interval = determineNextIntervalAtMaxLevel(heap);
      if(logger.isDebugging()) {
        logger.debugFine("next interval in dim " + dim + ": " + interval);
      }
      else if(logger.isVerbose()) {
        logger.verbose("next interval in dim " + dim + ": " + interval);
      }

      // only noise left
      if(interval == null) {
        break;
      }

      // do a dim-1 dimensional run
      ModifiableDBIDs clusterIDs = DBIDUtil.newHashSet();
      if(dim > minDim + 1) {
        ModifiableDBIDs ids;
        Matrix basis_dim_minus_1;
        if(adjust) {
          ids = DBIDUtil.newHashSet();
          basis_dim_minus_1 = runDerivator(relation, dim, interval, ids);
        }
        else {
          ids = interval.getIDs();
          basis_dim_minus_1 = determineBasis(SpatialUtil.centroid(interval));
        }

        if(ids.size() != 0) {
          MaterializedRelation<ParameterizationFunction> db = buildDB(dim, basis_dim_minus_1, ids, relation);
          // add result of dim-1 to this result
          Clustering<Model> res_dim_minus_1 = doRun(db, progress);
          for(Cluster<Model> cluster : res_dim_minus_1.getAllClusters()) {
            res.addCluster(cluster);
View Full Code Here

      else {
        d_maxs[i] = d_max - d_mins[i];
      }

      HyperBoundingBox alphaInterval = new HyperBoundingBox(alphaMin, alphaMax);
      ModifiableDBIDs intervalIDs = split.determineIDs(ids, alphaInterval, d_mins[i], d_maxs[i]);
      if(intervalIDs != null && intervalIDs.size() >= minPts) {
        CASHInterval rootInterval = new CASHInterval(alphaMin, alphaMax, split, intervalIDs, 0, 0, d_mins[i], d_maxs[i]);
        heap.add(new IntegerPriorityObject<CASHInterval>(rootInterval.priority(), rootInterval));
      }
    }
View Full Code Here

      return candidates;
    }

    // refinement of candidates
    Map<DBID, KNNHeap<D>> knnLists = new HashMap<DBID, KNNHeap<D>>();
    ModifiableDBIDs candidateIDs = DBIDUtil.newArray();
    for(DistanceResultPair<D> candidate : candidates) {
      KNNHeap<D> knns = new KNNHeap<D>(k, getDistanceQuery().infiniteDistance());
      knnLists.put(candidate.getDBID(), knns);
      candidateIDs.add(candidate.getDBID());
    }
    batchNN(getRoot(), candidateIDs, knnLists);

    List<DistanceResultPair<D>> result = new ArrayList<DistanceResultPair<D>>();
    for(DBID cid : candidateIDs) {
View Full Code Here

          }
        }
      }
    }
    else {
      ModifiableDBIDs ids = DBIDUtil.newArray(knnLists.size());
      ids.addAll(knnLists.keySet());
      List<DistanceEntry<D, SpatialEntry>> entries = getSortedEntries(node, ids);
      for(DistanceEntry<D, SpatialEntry> distEntry : entries) {
        D minDist = distEntry.getDistance();
        for(Entry<DBID, KNNHeap<D>> ent : knnLists.entrySet()) {
          final KNNHeap<D> knns_q = ent.getValue();
View Full Code Here

      final WritableDataStore<DBIDs> store = DataStoreUtil.makeStorage(database.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC | DataStoreFactory.HINT_TEMP, DBIDs.class);

      // Expand multiple steps
      FiniteProgress progress = logger.isVerbose() ? new FiniteProgress("Expanding neighborhoods", database.size(), logger) : null;
      for(final DBID id : database.iterDBIDs()) {
        ModifiableDBIDs res = DBIDUtil.newHashSet(id);
        DBIDs todo = id;
        for(int i = 0; i < steps; i++) {
          ModifiableDBIDs ntodo = DBIDUtil.newHashSet();
          for(final DBID oid : todo) {
            DBIDs add = innerinst.getNeighborDBIDs(oid);
            if(add != null) {
              for(DBID nid : add) {
                // Skip neighbors that were already collected.
                if(res.contains(nid)) {
                  continue;
                }
                ntodo.add(nid);
                res.add(nid);
              }
            }
          }
          // No new neighbors found; further steps cannot add anything.
          if(ntodo.size() == 0) {
            break;
          }
          todo = ntodo;
        }
        store.put(id, res);
View Full Code Here
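
The expansion loop above keeps two collections per object: res, a hash-backed set of everything reached so far, and ntodo, the frontier to expand in the next step. Only the seen-set profits from hash backing, since its contains() check runs once per candidate neighbor; the frontier is merely appended to and iterated, so an array-backed collection would do as well. A compressed sketch of the pattern, with getNeighbors() as a hypothetical stand-in for innerinst.getNeighborDBIDs():

// Frontier-expansion sketch; getNeighbors() is a hypothetical neighbor lookup.
ModifiableDBIDs expand(DBID start, int steps) {
  ModifiableDBIDs seen = DBIDUtil.newHashSet(start); // hash-backed: fast contains()
  DBIDs frontier = start;                            // in this API, a single DBID is also a DBIDs
  for(int i = 0; i < steps; i++) {
    ModifiableDBIDs next = DBIDUtil.newArray();      // only appended and iterated
    for(DBID id : frontier) {
      DBIDs neighbors = getNeighbors(id);
      if(neighbors == null) {
        continue;
      }
      for(DBID nid : neighbors) {
        if(!seen.contains(nid)) {
          seen.add(nid);
          next.add(nid);
        }
      }
    }
    if(next.isEmpty()) {
      break;                                         // nothing new was reached
    }
    frontier = next;
  }
  return seen;
}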

    WritableDataStore<Double> scores = DataStoreUtil.makeStorage(relationx.getDBIDs(), DataStoreFactory.HINT_STATIC, Double.class);
    DoubleMinMax mm = new DoubleMinMax(0.0, 0.0);

    // Outlier detection loop
    {
      ModifiableDBIDs idview = DBIDUtil.newHashSet(relationx.getDBIDs());
      ProxyView<V> proxy = new ProxyView<V>(relationx.getDatabase(), idview, relationx);

      double phialpha = MathUtil.standardNormalProbit(1.0 - alpha / 2);
      // Detect outliers while significant.
      while(true) {
        Pair<DBID, Double> candidate = singleIteration(proxy, relationy);
        if(candidate.second < phialpha) {
          break;
        }
        scores.put(candidate.first, candidate.second);
        if (!Double.isNaN(candidate.second)) {
          mm.put(candidate.second);
        }
        idview.remove(candidate.first);
      }

      // Remaining objects are inliers
      for(DBID id : idview) {
        scores.put(id, 0.0);
View Full Code Here
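
The backward-search fragment above shows another reason to choose a hash-backed ModifiableDBIDs: the working set idview is wrapped in a ProxyView, so removing an ID from it immediately shrinks the data visible to the next iteration without copying anything. Condensed to its ModifiableDBIDs part, with names reused from the fragment for illustration and scoreMostSignificant()/threshold as hypothetical stand-ins for singleIteration and phialpha:

// Condensed working-set loop; scoreMostSignificant() and threshold are hypothetical.
ModifiableDBIDs idview = DBIDUtil.newHashSet(relationx.getDBIDs()); // start with every ID
ProxyView<V> proxy = new ProxyView<V>(relationx.getDatabase(), idview, relationx);
while(true) {
  Pair<DBID, Double> candidate = scoreMostSignificant(proxy);
  if(candidate.second < threshold) {
    break;                          // nothing significant remains
  }
  scores.put(candidate.first, candidate.second);
  idview.remove(candidate.first);   // the proxy shrinks together with the ID view
}
for(DBID id : idview) {
  scores.put(id, 0.0);              // everything left is treated as an inlier
}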

      }

      // Fill the neighborhood matrix F:
      {
        List<DistanceResultPair<D>> neighbors = knnQuery.getKNNForDBID(id, k + 1);
        ModifiableDBIDs neighborhood = DBIDUtil.newArray(neighbors.size());
        for(DistanceResultPair<D> dpair : neighbors) {
          if(id.equals(dpair.getDBID())) {
            continue;
          }
          neighborhood.add(dpair.getDBID());
        }
        // Weight the object itself positively.
        F.set(i, i, 1.0);
        // Use floating-point division; -1 / size() in integer arithmetic truncates to 0.
        final double nweight = -1.0 / neighborhood.size();
        // We need to find the index positions of the neighbors, unfortunately.
        for(DBID nid : neighborhood) {
          int pos = Collections.binarySearch(ids, nid);
          assert (pos >= 0);
          F.set(pos, i, nweight);
View Full Code Here

    return 1.0 - (tsteps / (float) (steps + 1));
  }

  @Override
  public Collection<DoubleObjPair<DBID>> getWeightedNeighbors(DBID reference) {
    ModifiableDBIDs seen = DBIDUtil.newHashSet();
    List<DoubleObjPair<DBID>> result = new ArrayList<DoubleObjPair<DBID>>();

    // Add starting object
    result.add(new DoubleObjPair<DBID>(computeWeight(0), reference));
    seen.add(reference);
    // Extend.
    DBIDs cur = reference;
    for(int i = 1; i <= steps; i++) {
      final double weight = computeWeight(i);
      // Collect newly discovered IDs
      ModifiableDBIDs add = DBIDUtil.newHashSet();
      for(DBID id : cur) {
        for(DBID nid : inner.getNeighborDBIDs(id)) {
          // Seen before?
          if(seen.contains(nid)) {
            continue;
          }
          add.add(nid);
          result.add(new DoubleObjPair<DBID>(weight, nid));
        }
      }
      if(add.size() == 0) {
        break;
      }
      cur = add;
    }
    return result;
View Full Code Here
