Package com.clearnlp.classification.vector

Examples of com.clearnlp.classification.vector.SparseFeatureVector


   * @param vector the string feature vector.
   * @return the sparse feature vector converted from the string feature vector.
   */
  public SparseFeatureVector toSparseFeatureVector(StringFeatureVector vector)
  {
    SparseFeatureVector sparse = new SparseFeatureVector(vector.hasWeight());
    int i, index, size = vector.size();
    ObjectIntHashMap<String> map;
    String type, value;
   
    for (i=0; i<size; i++)
    {
      type  = vector.getType(i);
      value = vector.getValue(i);
     
      if ((map = m_features.get(type)) != null && (index = map.get(value)) > 0)
      {
        if (sparse.hasWeight())
          sparse.addFeature(index, vector.getWeight(i));
        else
          sparse.addFeature(index);
      }
    }
   
    sparse.trimToSize();
    return sparse;
  }
View Full Code Here


 
// ================================ BUILD ================================

  public void build(int labelCutoff, int featureCutoff, int randomSeed, boolean initialize)
  {
    SparseFeatureVector vector;
    StringInstance instance;
    int label;
   
    if (initialize) init();
    buildLabels(labelCutoff);
    buildFeatures(featureCutoff);
   
    l_instances = Lists.newArrayList();
    r_shuffle = new Random(randomSeed);
    l_indices = new IntArrayList();
   
    while ((instance = i_collector.pollInstance()) != null)
    {
      if ((label = getLabelIndex(instance.getLabel())) < 0)
        continue;
     
      vector = toSparseFeatureVector(instance.getFeatureVector());
     
      if (!vector.isEmpty())
      {
        l_instances.add(new IntInstance(label, vector));
        l_indices.add(l_indices.size());
      }
    }
View Full Code Here

    return scores;
  }
 
  protected void updateCounts(StringModelAD model, IntInstance instance, double[] gradidents)
  {
    SparseFeatureVector x = instance.getFeatureVector();
    int i, j, len = x.size(), L = model.getLabelSize();
    double[] g = new double[L];
    double d;
   
    for (j=0; j<L; j++)
      g[j] = gradidents[j] * gradidents[j];
   
    for (i=0; i<len; i++)
    {
      d = UTMath.sq(x.getWeight(i));
     
      for (j=0; j<L; j++)
        d_gradients[model.getWeightIndex(j, x.getIndex(i))] += d * g[j];
    }
  }
 
View Full Code Here

    }
  }
 
  private void updateWeights(StringModelAD model, IntInstance instance, double[] gradients, int averageCount)
  {
    SparseFeatureVector x = instance.getFeatureVector();
    int i, j, len = x.size(), L = model.getLabelSize();
    int xi; double vi;
   
    for (i=0; i<len; i++)
    {
      xi = x.getIndex(i);
      vi = x.getWeight(i);
     
      for (j=0; j<L; j++)
        updateWeight(model, j, xi, gradients[j]*vi, averageCount);
    }
  }
View Full Code Here

    return qd;
  }
 
  private void updateWeights(StringModelAD model, IntInstance instance, int yp, int yn, double d)
  {
    SparseFeatureVector x = instance.getFeatureVector();
    int i, xi, len = x.size();
    float vi;
   
    for (i=0; i<len; i++)
    {
      xi = x.getIndex(i);
      vi = (float)(x.getWeight(i) * d);
     
      model.updateWeight(yp, xi,  vi);
      model.updateWeight(yn, xi, -vi);
    }
  }
View Full Code Here

    return Collections.max(ps);
  }
 
  private void updateCounts(StringModelAD model, IntInstance instance, int yp, int yn)
  {
    SparseFeatureVector x = instance.getFeatureVector();
    int i, xi, len = x.size();
    double vi;
   
    for (i=0; i<len; i++)
    {
      xi = x.getIndex(i);
      vi = UTMath.sq(x.getWeight(i));
     
      d_gradients[model.getWeightIndex(yp, xi)] += vi;
      d_gradients[model.getWeightIndex(yn, xi)] += vi;
    }
  }
View Full Code Here

    }
  }
 
  private void updateWeights(StringModelAD model, IntInstance instance, int yp, int yn, int averageCount)
  {
    SparseFeatureVector x = instance.getFeatureVector();
    int i, xi, len = x.size();
    double vi;
   
    for (i=0; i<len; i++)
    {
      xi = x.getIndex(i);
      vi = x.getWeight(i);

      updateWeight(model, yp, xi,  vi, averageCount);
      updateWeight(model, yn, xi, -vi, averageCount);
    }
  }
View Full Code Here

    LOG.info("Building:\n");
    initModelMaps();
   
    StringInstance instance;
    int y, i, size = s_instances.size();
    SparseFeatureVector x;
   
    for (i=0; i<size; i++)
    {
      instance = s_instances.get(i);
     
      if ((y = s_model.getLabelIndex(instance.getLabel())) < 0)
        continue;
     
      x = s_model.toSparseFeatureVector(instance.getFeatureVector());
     
      a_ys.add(y);
      a_xs.add(x.getIndices());
      if (b_weighta_vs.add(x.getWeights());
    }
   
    a_ys.trimToSize();
    a_xs.trimToSize();
    if (b_weighta_vs.trimToSize();
View Full Code Here

    LOG.info("Building:\n");
    initModelMaps();
   
    StringInstance instance;
    int y, i, size = s_instances.size();
    SparseFeatureVector x;
   
    for (i=0; i<size; i++)
    {
      instance = s_instances.get(i);
     
      if ((y = s_model.getLabelIndex(instance.getLabel())) < 0)
        continue;
     
      x = s_model.toSparseFeatureVector(instance.getFeatureVector());
     
      a_ys.add(y);
      a_xs.add(x.getIndices());
      if (b_weighta_vs.add(x.getWeights());
    }
   
    a_ys.trimToSize();
    a_xs.trimToSize();
    if (b_weighta_vs.trimToSize();
View Full Code Here

  static public Pair<String,SparseFeatureVector> toInstance(String line, boolean hasWeight)
  {
    String[] tmp = line.split(DELIM_COL);
    String label = tmp[0];
   
    SparseFeatureVector vector = new SparseFeatureVector(hasWeight);
    int i, size = tmp.length;
   
    for (i=1; i<size; i++)
      vector.addFeature(tmp[i]);
   
    return new Pair<String,SparseFeatureVector>(label, vector);
  }
View Full Code Here

TOP

Related Classes of com.clearnlp.classification.vector.SparseFeatureVector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.