Package: weka.core

Examples of weka.core.DenseInstance


    int count = 0;
    double [] vals = new double[3];
    vals[count++] = margin;
    vals[count++] = current;
    vals[count++] = cumulative;
    return new DenseInstance(1.0, vals);
  }
View Full Code Here


    } else {
    vals[count++] = tc.getTruePositive() / expectedByChance;
    
    }
    vals[count++] = prob;
    return new DenseInstance(1.0, vals);
  }
View Full Code Here

      fpval = threshInst.instance(i).value(fpind);
      tpval = threshInst.instance(i).value(tpind);
      thresh = threshInst.instance(i).value(threshind);
      vals = new double [3];
      vals[0] = 0; vals[1] = fpval; vals[2] = thresh;
      insts.add(new DenseInstance(1.0, vals));
      vals = new double [3];
      vals[0] = 1; vals[1] = 1.0 - tpval; vals[2] = thresh;
      insts.add(new DenseInstance(1.0, vals));
    }
   
    return insts;
  }
View Full Code Here

    double[] vals = new double[2];
    vals[0] = SVMOutput(-1, inst);
    if (inst.classValue() == cl2) {
      vals[1] = 1;
    }
    data.add(new DenseInstance(inst.weight(), vals));
  }
      } else {

  // Check whether number of folds too large
  if (numFolds > insts.numInstances()) {
    numFolds = insts.numInstances();
  }

  // Make copy of instances because we will shuffle them around
  insts = new Instances(insts);
 
  // Perform three-fold cross-validation to collect
  // unbiased predictions
  insts.randomize(random);
  insts.stratify(numFolds);
  for (int i = 0; i < numFolds; i++) {
    Instances train = insts.trainCV(numFolds, i, random);
          /*    SerializedObject so = new SerializedObject(this);
                  BinarySMO smo = (BinarySMO)so.getObject(); */
          BinarySMO smo = new BinarySMO();
          smo.setKernel(Kernel.makeCopy(SMO.this.m_kernel));
          smo.buildClassifier(train, cl1, cl2, false, -1, -1);
    Instances test = insts.testCV(numFolds, i);
    for (int j = 0; j < test.numInstances(); j++) {
      double[] vals = new double[2];
      vals[0] = smo.SVMOutput(-1, test.instance(j));
      if (test.instance(j).classValue() == cl2) {
        vals[1] = 1;
      }
      data.add(new DenseInstance(test.instance(j).weight(), vals));
    }
  }
      }

      // Build logistic regression model
View Full Code Here

    values[i++] = dist[j];
  }
      }
    }
    values[i] = instance.classValue();
    metaInstance = new DenseInstance(1, values);
    metaInstance.setDataset(m_MetaFormat);
    return metaInstance;
  }
View Full Code Here

    x = x * (getMaxRange() - getMinRange()) + getMinRange();
   
    // generate y
    atts    = new double[1];
    atts[0] = x;
    inst    = new DenseInstance(1.0, atts);
    m_Filter.input(inst);
    m_Filter.batchFinished();
    inst = m_Filter.output();
   
    // noise
    y = inst.value(1) + getAmplitude()
            * m_NoiseRandom.nextGaussian()
            * getNoiseRate() * getNoiseVariance();

    // generate attributes
    atts = new double[m_DatasetFormat.numAttributes()];
   
    atts[0] = x;
    atts[1] = y;
    result = new DenseInstance(1.0, atts);

    // dataset reference
    result.setDataset(m_DatasetFormat);
   
    return result;
View Full Code Here

      if (inst.numClasses() == 2) {
  double[] newInst = new double[2];
  newInst[0] = m_classifiers[0][1].SVMOutput(-1, inst);
  newInst[1] = Utils.missingValue();
  return m_classifiers[0][1].m_logistic.
    distributionForInstance(new DenseInstance(1, newInst));
      }
      double[][] r = new double[inst.numClasses()][inst.numClasses()];
      double[][] n = new double[inst.numClasses()][inst.numClasses()];
      for (int i = 0; i < inst.numClasses(); i++) {
  for (int j = i + 1; j < inst.numClasses(); j++) {
    if ((m_classifiers[i][j].m_alpha != null) ||
        (m_classifiers[i][j].m_sparseWeights != null)) {
      double[] newInst = new double[2];
      newInst[0] = m_classifiers[i][j].SVMOutput(-1, inst);
      newInst[1] = Utils.missingValue();
      r[i][j] = m_classifiers[i][j].m_logistic.
        distributionForInstance(new DenseInstance(1, newInst))[0];
      n[i][j] = m_classifiers[i][j].m_sumOfWeights;
    }
  }
      }
      return weka.classifiers.meta.MultiClassClassifier.pairwiseCoupling(n, r);
View Full Code Here

            * m_NoiseRandom.nextGaussian()
            * getNoiseRate() * getNoiseVariance();

    atts[0] = x;
    atts[1] = y;
    result = new DenseInstance(1.0, atts);

    // dataset reference
    result.setDataset(m_DatasetFormat);
   
    return result;
View Full Code Here

   
    // create instance
    if (sparse)
      result = new SparseInstance(weight, values);
    else
      result = new DenseInstance(weight, values);
   
    return result;
  }
View Full Code Here

    for(int k=0; k<node2.anchor.numValues(); k++) {
      if(node2.anchor.index(k)==classIdx)
        continue;
      attrVals[k] += node2.anchor.valueSparse(k)*anchr2Ratio;
    }
    temp = new DenseInstance(1.0, attrVals);
    return temp;
  }
View Full Code Here

TOP

Related Classes of weka.core.DenseInstance

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware#gmail.com.