Package fr.lip6.jkernelmachines.kernel.typed

Examples of fr.lip6.jkernelmachines.kernel.typed.DoubleLinear


   * @param list
   */
  public static void normalizeDoubleList(List<double[]> list) {
    if(list.isEmpty())
      return;
    DoubleLinear linear = new DoubleLinear();
   
    for(double[] desc : list) {
      double norm = Math.sqrt(linear.valueOf(desc, desc));
      for(int x = 0 ; x < desc.length ; x++)
        desc[x] /= norm;
    }
   
  }
View Full Code Here


  @Test
  public final void testProjectListListOfTrainingSampleOfT() {
    double[][] m1 = k.getKernelMatrix(list);
   
    List<TrainingSample<double[]>> plist = pca.projectList(list);
    DoubleLinear lin = new DoubleLinear();
    double[][] m2 = lin.getKernelMatrix(plist);
   
    for(int i = 0 ; i < m1.length ; i++) {
      for(int j = i ; j < m1[0].length ; j++) {
        assertEquals(m1[i][j], m2[i][j]+pca.getMean(), 1e-10);
      }
View Full Code Here

    int nbNegTrain = 100;
    int nbPosTest = 200;
    int nbNegTest = 1000;
    double maxcv = 50;
   
    DoubleLinear k = new DoubleLinear();
   
    Random ran = new Random(System.currentTimeMillis());
    double posstart = 0.8;
    double negstart = -0.8;
   
    double pegerr = 0;
    double pegstd = 0;
    double tpegerr = 0;
    double tpegstd = 0;
    for(int cv = 0 ; cv < maxcv; cv++)
    {
      ArrayList<TrainingSample<double[]>> train = new ArrayList<TrainingSample<double[]>>();
      //1. generate positive train samples
      for(int i = 0 ; i < nbPosTrain; i++)
      {
        double[] t = new double[dimension];
        for(int x = 0 ; x < dimension; x++)
        {
          t[x] = posstart + ran.nextGaussian();
        }

        train.add(new TrainingSample<double[]>(t, 1));
      }
      //2. generate negative train samples
      for(int i = 0 ; i < nbNegTrain; i++)
      {
        double[] t = new double[dimension];
        for(int x = 0 ; x < dimension; x++)
        {
          t[x] = negstart + ran.nextGaussian();
        }

        train.add(new TrainingSample<double[]>(t, -1));
      }

      ArrayList<TrainingSample<double[]>> test = new ArrayList<TrainingSample<double[]>>();
      //4. generate positive test samples
      for(int i = 0 ; i < nbPosTest; i++)
      {
        double[] t = new double[dimension];
        for(int x = 0 ; x < dimension; x++)
        {
          t[x] = posstart + ran.nextGaussian();
        }

        test.add(new TrainingSample<double[]>(t, 1));
      }
      //5. generate negative test samples
      for(int i = 0 ; i < nbNegTest; i++)
      {
        double[] t = new double[dimension];
        for(int x = 0 ; x < dimension; x++)
        {
          t[x] = negstart + ran.nextGaussian();
        }

        test.add(new TrainingSample<double[]>(t, -1));
      }

      int T = 10*train.size()/10;
      double t0 = 1.e2;
      double lambda = 1e-2;
//      double lambdat = 1e-1;
      boolean bias = false;
      int K = 10;
     
      //3.1 train pegasos
      DoublePegasosSVM peg = new DoublePegasosSVM();
      peg.setLambda(lambda);
      peg.setK(K);
      peg.setT(T);
      peg.setT0(t0);
      peg.setBias(bias);
      peg.train(train);


      //3.2 train transductive pegasos
//      DoubleGaussL2 kernel = new DoubleGaussL2();
//      kernel.setGamma(0.01);
      S3VMLightPegasos tpeg = new S3VMLightPegasos();
      tpeg.setLambda(lambda);
      tpeg.setK(K);
      tpeg.setT(T);
      tpeg.setT0(t0);
      tpeg.setBias(bias);
//      tpeg.setVerbosityLevel(3);
      tpeg.setNumplus(200);
      tpeg.train(train, test);
     

      //6. test svm
      int nbErr = 0;
      int tpegErr = 0;
      for(TrainingSample<double[]> t : test)
      {
        int y = t.label;
        double value = peg.valueOf(t.sample);
        if(y*value < 0)
          nbErr++;
        double pegVal = tpeg.valueOf(t.sample);
        if(y*pegVal < 0)
        {
          tpegErr++;
//          System.out.println("y : "+y+" value : "+value+" nbErr : "+nbErr+" pegVal : "+pegVal+" pegErr : "+tpegErr);
        }

//        System.out.println("y : "+y+" value : "+value+" nbErr : "+nbErr+" pegVal : "+pegVal+" pegErr : "+tpegErr);


      }

      pegerr += nbErr;
      pegstd += nbErr*nbErr;
      tpegerr += tpegErr;
      tpegstd += tpegErr*tpegErr;

      //7.1 compute w for pegasos
      double w[] = peg.getW();
      System.out.println("peg : w : "+Arrays.toString(w));
      System.out.println("peg : bias : "+peg.getB());
      System.out.println("peg : ||w|| : "+k.valueOf(w, w));

      //7.2 w from transductive pegasos
      System.out.println("tpeg : w : "+Arrays.toString(tpeg.getW()));
      System.out.println("tpeg : bias : "+tpeg.getB());
      System.out.println("tpeg : ||w|| : "+k.valueOf(tpeg.getW(), tpeg.getW()));

      //8. comparing smo and peg
      System.out.println("< peg, tpeg > : "+(k.valueOf(w, tpeg.getW())/Math.sqrt(k.valueOf(w, w)*k.valueOf(tpeg.getW(), tpeg.getW()))));
    }
   
    //final stats
    System.out.println();
    pegstd = Math.sqrt(pegstd/maxcv - (pegerr/maxcv)*(pegerr/maxcv));
 
View Full Code Here

 
  private DoubleLinear linear;
 
  @Before
  public void setUp() {
    linear = new DoubleLinear();
  }
View Full Code Here

   
    // generate data
    GaussianGenerator gen = new GaussianGenerator(2);
    List<TrainingSample<double[]>> list = gen.generateList(5, 10);
   
    DoubleLinear lin = new DoubleLinear();
    double[][] matrix = lin.getKernelMatrix(list);
   
    double[][] train = new double[5][5];
    double[][] test = new double[10][10];
   
    // fill train
View Full Code Here

          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      LaSVM<double[]> svm = new LaSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("smo".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      Kernel<double[]> k = new DoubleLinear();
      if ("GaussianL2".equalsIgnoreCase(kernelBox.getSelectedItem()
          .toString())) {
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      SMOSVM<double[]> svm = new SMOSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("sag".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      DoubleSAG svm = new DoubleSAG();
      svm.setLambda(1. / (train.size() * Double
          .parseDouble(regularizationField.getText())));
      svm.setE(10);
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      svLabel.setText("N/A");

      // save current classifier
      model.classifier = svm;
    } else if ("pegasos".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      DoublePegasosSVM svm = new DoublePegasosSVM();

      svm.setLambda(1. / (train.size() * Double
          .parseDouble(regularizationField.getText())));
      svm.setK(train.size() / 20);
      svm.setT(10 * train.size());
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      svLabel.setText("N/A");

      // save current classifier
      model.classifier = svm;
    } else if ("simplemkl".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      SimpleMKL<double[]> svm = new SimpleMKL<double[]>();
      svm.setC(Double.parseDouble(regularizationField.getText()));

      double[] G = { 0.05, 0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6 };
//      int dim = train.get(0).sample.length;
      for (double g : G) {
        svm.addKernel(new DoubleGaussL2(g));
//        // for(int i = 0 ; i < dim ; i++) {
//        // IndexDoubleGaussL2 k = new IndexDoubleGaussL2(i);
//        // k.setGamma(g);
//        // svm.addKernel(k);
//        // }
      }
      for (int d = 1; d < 5; d++) {
        svm.addKernel(new DoublePolynomial(d));
        svm.addKernel(new DoubleHPolynomial(d));
      }
      svm.train(localTrain);

      // info
View Full Code Here

          .toString())) {
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      LaSVM<double[]> svm = new LaSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("smo".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      Kernel<double[]> k = new DoubleLinear();
      if ("GaussianL2".equalsIgnoreCase(kernelBox.getSelectedItem()
          .toString())) {
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
View Full Code Here

//      weights[i] = 1.0/(dim*dim);
    }
   
   
    //1 train first svm
    GeneralizedDoubleGaussL2 kernel = new GeneralizedDoubleGaussL2(weights);
    svm = trainSVM(kernel);
    double[] a = svm.getAlphas();
    //update lambda matrix before objective computation
    updateLambdaMatrix(a, kernel);
    //compute old value of objective function
    oldObjective = computeObj(a);
    debug.println(2, "+ initial objective : "+oldObjective);
    debug.println(3, "+ initial weights : "+Arrays.toString(weights));
   
    //2. big loop
    double gap = 0;
    do
    {           
      //perform one step
      double objEvol = performPKLStep();
     
      if(objEvol < 0)
      {
        debug.println(1, "Error, performPKLStep return wrong value");
        System.exit(0);;
      }
      gap = 1 - objEvol;
     
      debug.println(1, "+ objective_gap : "+(float)gap);
      debug.println(1, "+");
     
    }
    while(gap >= stopGap);
   
   
    //3. get minimal objective svm and weights
    listOfKernelWeights = new ArrayList<Double>();
    for(int i = 0 ; i < weights.length; i++)
      listOfKernelWeights.add(weights[i]);
    kernel = new GeneralizedDoubleGaussL2(weights);
    svm = trainSVM(kernel);
    //update lambdamatrix
    a = svm.getAlphas();
    updateLambdaMatrix(a, kernel);
   
View Full Code Here

    //store new as old for the loop
    double objective = oldObjective;
    double[] oldWeights = weights;
   
    //train new svm
    GeneralizedDoubleGaussL2 k = new GeneralizedDoubleGaussL2(weights);
    LaSVM<double[]> svm = trainSVM(k);
    //update lambdamatrix
    double[] a = svm.getAlphas();
    updateLambdaMatrix(a, k);
   
    //compute grad
    double[] gNew = computeGrad(k);
   
    //estimate B
    double[] B = computeB(gNew);
   
    double lambda = 1.;
    do
    {
      //1. update weights.
      double[] wNew = new double[weights.length];
      double Z = 0;
      for(int x = 0 ; x < wNew.length ; x++) {
        wNew[x] = weights[x] - lambda * B[x] * gNew[x];
        if(wNew[x] < num_cleaning)
          wNew[x] = 0;
        if(hasNorm)
          Z += wNew[x];
      }
     
      if(hasNorm) {
        for(int x = 0 ; x < wNew.length ; x++)
          wNew[x] /= Z;
      }
       
     
      //2. retrain SVM
      k = new GeneralizedDoubleGaussL2(wNew);
      svm = trainSVM(k);
      //update lambdamatrix
      a = svm.getAlphas();
      updateLambdaMatrix(a, k);
     
View Full Code Here

  @Test
  public final void testTrainListOfT() {
    DoubleGaussL2 k = new DoubleGaussL2();
    SimpleMKLDensity<double[]> de = new SimpleMKLDensity<double[]>();
    for(int i = 0 ; i < 2 ; i++) {
      de.addKernel(new IndexDoubleGaussL2(i));
    }
    de.addKernel(k);
    de.train(train);

    for (double[] x : train) {
View Full Code Here

TOP

Related Classes of fr.lip6.jkernelmachines.kernel.typed.DoubleLinear

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.