Package fr.lip6.jkernelmachines.kernel.typed

Examples of fr.lip6.jkernelmachines.kernel.typed.DoubleGaussL2


          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      LaSVM<double[]> svm = new LaSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("smo".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      Kernel<double[]> k = new DoubleLinear();
      if ("GaussianL2".equalsIgnoreCase(kernelBox.getSelectedItem()
          .toString())) {
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      SMOSVM<double[]> svm = new SMOSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("sag".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      DoubleSAG svm = new DoubleSAG();
      svm.setLambda(1. / (train.size() * Double
          .parseDouble(regularizationField.getText())));
      svm.setE(10);
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      svLabel.setText("N/A");

      // save current classifier
      model.classifier = svm;
    } else if ("pegasos".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      DoublePegasosSVM svm = new DoublePegasosSVM();

      svm.setLambda(1. / (train.size() * Double
          .parseDouble(regularizationField.getText())));
      svm.setK(train.size() / 20);
      svm.setT(10 * train.size());
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      svLabel.setText("N/A");

      // save current classifier
      model.classifier = svm;
    } else if ("simplemkl".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      SimpleMKL<double[]> svm = new SimpleMKL<double[]>();
      svm.setC(Double.parseDouble(regularizationField.getText()));

      double[] G = { 0.05, 0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6 };
//      int dim = train.get(0).sample.length;
      for (double g : G) {
        svm.addKernel(new DoubleGaussL2(g));
//        // for(int i = 0 ; i < dim ; i++) {
//        // IndexDoubleGaussL2 k = new IndexDoubleGaussL2(i);
//        // k.setGamma(g);
//        // svm.addKernel(k);
//        // }
      }
      for (int d = 1; d < 5; d++) {
        svm.addKernel(new DoublePolynomial(d));
        svm.addKernel(new DoubleHPolynomial(d));
      }
      svm.train(localTrain);

      // info
View Full Code Here


          .toString())) {
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      LaSVM<double[]> svm = new LaSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("smo".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      Kernel<double[]> k = new DoubleLinear();
      if ("GaussianL2".equalsIgnoreCase(kernelBox.getSelectedItem()
          .toString())) {
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
View Full Code Here

//      weights[i] = 1.0/(dim*dim);
    }
   
   
    //1 train first svm
    GeneralizedDoubleGaussL2 kernel = new GeneralizedDoubleGaussL2(weights);
    svm = trainSVM(kernel);
    double[] a = svm.getAlphas();
    //update lambda matrix before objective computation
    updateLambdaMatrix(a, kernel);
    //compute old value of objective function
    oldObjective = computeObj(a);
    debug.println(2, "+ initial objective : "+oldObjective);
    debug.println(3, "+ initial weights : "+Arrays.toString(weights));
   
    //2. big loop
    double gap = 0;
    do
    {           
      //perform one step
      double objEvol = performPKLStep();
     
      if(objEvol < 0)
      {
        debug.println(1, "Error, performPKLStep return wrong value");
        System.exit(0);;
      }
      gap = 1 - objEvol;
     
      debug.println(1, "+ objective_gap : "+(float)gap);
      debug.println(1, "+");
     
    }
    while(gap >= stopGap);
   
   
    //3. get minimal objective svm and weights
    listOfKernelWeights = new ArrayList<Double>();
    for(int i = 0 ; i < weights.length; i++)
      listOfKernelWeights.add(weights[i]);
    kernel = new GeneralizedDoubleGaussL2(weights);
    svm = trainSVM(kernel);
    //update lambdamatrix
    a = svm.getAlphas();
    updateLambdaMatrix(a, kernel);
   
View Full Code Here

    //store new as old for the loop
    double objective = oldObjective;
    double[] oldWeights = weights;
   
    //train new svm
    GeneralizedDoubleGaussL2 k = new GeneralizedDoubleGaussL2(weights);
    LaSVM<double[]> svm = trainSVM(k);
    //update lambdamatrix
    double[] a = svm.getAlphas();
    updateLambdaMatrix(a, k);
   
    //compute grad
    double[] gNew = computeGrad(k);
   
    //estimate B
    double[] B = computeB(gNew);
   
    double lambda = 1.;
    do
    {
      //1. update weights.
      double[] wNew = new double[weights.length];
      double Z = 0;
      for(int x = 0 ; x < wNew.length ; x++) {
        wNew[x] = weights[x] - lambda * B[x] * gNew[x];
        if(wNew[x] < num_cleaning)
          wNew[x] = 0;
        if(hasNorm)
          Z += wNew[x];
      }
     
      if(hasNorm) {
        for(int x = 0 ; x < wNew.length ; x++)
          wNew[x] /= Z;
      }
       
     
      //2. retrain SVM
      k = new GeneralizedDoubleGaussL2(wNew);
      svm = trainSVM(k);
      //update lambdamatrix
      a = svm.getAlphas();
      updateLambdaMatrix(a, k);
     
View Full Code Here

  @Test
  public final void testTrainListOfT() {
    DoubleGaussL2 k = new DoubleGaussL2();
    SimpleMKLDensity<double[]> de = new SimpleMKLDensity<double[]>();
    for(int i = 0 ; i < 2 ; i++) {
      de.addKernel(new IndexDoubleGaussL2(i));
    }
    de.addKernel(k);
    de.train(train);

    for (double[] x : train) {
View Full Code Here

    DoubleGaussianMixtureModel gmm = new DoubleGaussianMixtureModel(1);
    gmm.train(train);

    SimpleMKLDensity<double[]> mkl = new SimpleMKLDensity<double[]>();
    for (int x = 0; x < dimension; x++) {
      mkl.addKernel(new IndexDoubleGaussL2(x));
    }
    mkl.setC(100);
    mkl.train(train);

    ArrayList<double[]> test = new ArrayList<double[]>();
View Full Code Here

      printHelp();
      System.exit(-1);
    }
    // perform PCA
    if(hasPCA == 1) {
      DoublePCA pca = new DoublePCA();
      pca.train(list);
      list = pca.projectList(list);
    }
    else if(hasPCA == 2) {
      DoublePCA pca = new DoublePCA();
      pca.train(list);
      list = pca.projectList(list, true);
    }

    // initialize CV
    AccuracyEvaluator<double[]> ev = new AccuracyEvaluator<double[]>();
    RandomSplitCrossValidation<double[]> cv = new RandomSplitCrossValidation<double[]>(
View Full Code Here

    model.dim = train.get(0).sample.length;

    // perform preprocessing
    List<TrainingSample<double[]>> localTrain;
    if (pcaBox.isSelected()) {
      model.pca = new DoublePCA();
      model.pca.train(train);
      model.pcaEnable = true;
      model.whiteningEnable = whiteBox.isSelected();
      localTrain = model.pca.projectList(train, whiteBox.isSelected());
    } else {
View Full Code Here

    GaussianGenerator gen = new GaussianGenerator(dim);
    list = gen.generateList(nbSamples);
   
   
    pca = new DoublePCA();
    pca.train(list);
   
  }
View Full Code Here

      rmap.put(index, s);
      index++;
    }
   
    //computing matrix       
    ThreadedMatrixOperator factory = new ThreadedMatrixOperator()
    {
      @Override
      // Fills rows [from, to) of the Gram matrix: for each row index, looks up
      // the corresponding sample via the reverse map, then evaluates the kernel
      // against every mapped sample to populate that row's columns.
      public void doLines(double[][] matrix, int from, int to) {
        for(int index = from ; index < to ; index++)
        {
          //reverse search through mapping S <-> index to recover the row's sample
          S s1 = rmap.get(index);
          //mapped signature (feature representation) of that sample
          T t1 = signatures.get(s1);

          //iterate all mapped samples S <-> T to fill every column of this row
          for(Iterator<S> iter = map.keySet().iterator() ; iter.hasNext() ;)
          {
            S s2 = iter.next();
            //get column index of s2
            int j = map.get(s2);
            //get signature of s2
            T t2 = signatures.get(s2);
            //kernel evaluation between the two signatures becomes the matrix entry
            // NOTE(review): assumes kernel is symmetric if the matrix is meant to be a
            // Gram matrix — only the full rectangle is computed here, no symmetry shortcut.
            matrix[index][j] = kernel.valueOf(t1, t2);
          }
        }
      };
    };


    /* do the actual computing of the matrix */
    matrix = factory.getMatrix(matrix);
       
  }
View Full Code Here

TOP

Related Classes of fr.lip6.jkernelmachines.kernel.typed.DoubleGaussL2

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.