Package fr.lip6.jkernelmachines.kernel.typed

Usage examples of fr.lip6.jkernelmachines.kernel.typed.DoubleGaussL2


   */
  @Before
  public void setUp() throws Exception {
    GaussianGenerator g = new GaussianGenerator(2, 1, 0.5);
    list = g.generateList(nb_samples);
    dk = new DoubleGaussL2();
    nk = new NystromKernel<double[]>(dk);
  }
View Full Code Here


  /**
   * @throws java.lang.Exception
   */
  @Before
  public void setUp() throws Exception {
    gaussl2 = new DoubleGaussL2();
  }
View Full Code Here

  /**
   * Test method for {@link fr.lip6.jkernelmachines.kernel.typed.DoubleGaussL2#DoubleGaussL2(double)}.
   */
  @Test
  public final void testDoubleGaussL2Double() {
    gaussl2 = new DoubleGaussL2(1.0);
    assertEquals(gaussl2.getGamma(), 1.0, 1e-15);
  }
View Full Code Here

          .getSelectedItem().toString())) {
        // Polynomial kernel: the parameter field holds the integer degree.
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        // NOTE(review): "HPlolynomial" looks like a typo for "HPolynomial" —
        // verify against the labels actually inserted into kernelBox before
        // changing it, as both sides must match.
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      // Train a LaSVM classifier on the local training set with the selected
      // kernel and the C value read from the regularization text field.
      LaSVM<double[]> svm = new LaSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      // Count support vectors: samples with a non-zero alpha coefficient.
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("smo".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      // Same kernel-selection logic as above, but trains an SMO-based SVM.
      // Default kernel is linear when no combo-box label matches.
      Kernel<double[]> k = new DoubleLinear();
      if ("GaussianL2".equalsIgnoreCase(kernelBox.getSelectedItem()
          .toString())) {
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      SMOSVM<double[]> svm = new SMOSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      // Support-vector count, as for LaSVM above.
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("sag".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      // Stochastic Average Gradient solver: linear model, no kernel choice.
      DoubleSAG svm = new DoubleSAG();
      // Convert the C-style regularization value to lambda = 1 / (n * C).
      svm.setLambda(1. / (train.size() * Double
          .parseDouble(regularizationField.getText())));
      svm.setE(10);
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      // No dual coefficients available for this solver, hence no SV count.
      svLabel.setText("N/A");

      // save current classifier
      model.classifier = svm;
    } else if ("pegasos".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      DoublePegasosSVM svm = new DoublePegasosSVM();

      // Same lambda conversion as for SAG: lambda = 1 / (n * C).
      svm.setLambda(1. / (train.size() * Double
          .parseDouble(regularizationField.getText())));
      // NOTE(review): K and T are derived from the training-set size —
      // presumably mini-batch size and iteration count, confirm against
      // DoublePegasosSVM's documentation.
      svm.setK(train.size() / 20);
      svm.setT(10 * train.size());
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      svLabel.setText("N/A");

      // save current classifier
      model.classifier = svm;
    } else if ("simplemkl".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      // Multiple-kernel learning: register a bank of Gaussian and
      // polynomial kernels and let SimpleMKL weight them during training.
      SimpleMKL<double[]> svm = new SimpleMKL<double[]>();
      svm.setC(Double.parseDouble(regularizationField.getText()));

      // Gaussian kernels over a geometric grid of gamma values.
      double[] G = { 0.05, 0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8, 25.6 };
//      int dim = train.get(0).sample.length;
      for (double g : G) {
        svm.addKernel(new DoubleGaussL2(g));
//        // for(int i = 0 ; i < dim ; i++) {
//        // IndexDoubleGaussL2 k = new IndexDoubleGaussL2(i);
//        // k.setGamma(g);
//        // svm.addKernel(k);
//        // }
      }
      // Polynomial and H-polynomial kernels of degree 1 through 4.
      for (int d = 1; d < 5; d++) {
        svm.addKernel(new DoublePolynomial(d));
        svm.addKernel(new DoubleHPolynomial(d));
      }
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
View Full Code Here

          // Consume the option's value argument.
          i++;

          // Kernel selection: "gauss" selects the Gaussian L2 kernel;
          // anything else falls back to a linear kernel.
          // NOTE(review): no bounds check after i++ — a trailing option with
          // no value throws ArrayIndexOutOfBoundsException here.
          if (args[i].equalsIgnoreCase("gauss")) {
            kernel = new DoubleGaussL2();
          } else {
            kernel = new DoubleLinear();
          }
        }
        // algorithm
        else if (args[i].equalsIgnoreCase("-a")) {
          i++;
View Full Code Here

    }

    // train model
    // Dispatch on the classifier combo box; each branch builds the kernel
    // from the kernelBox selection, with a linear kernel as the default.
    if ("lasvm"
        .equalsIgnoreCase(classifierBox.getSelectedItem().toString())) {
      Kernel<double[]> k = new DoubleLinear();
      if ("GaussianL2".equalsIgnoreCase(kernelBox.getSelectedItem()
          .toString())) {
        // Gaussian / triangular kernels take a double parameter (gamma).
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        k = new DoubleTriangleL2(
            Double.parseDouble(kernelParamTextField.getText()));
      } else if ("Polynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        // Polynomial kernels take an integer parameter (degree).
        k = new DoublePolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      } else if ("HPlolynomial".equalsIgnoreCase(kernelBox
          .getSelectedItem().toString())) {
        // NOTE(review): "HPlolynomial" looks like a typo for "HPolynomial" —
        // verify against the labels actually inserted into kernelBox.
        k = new DoubleHPolynomial(Integer.parseInt(kernelParamTextField
            .getText()));
      }

      LaSVM<double[]> svm = new LaSVM<double[]>(k);
      svm.setC(Double.parseDouble(regularizationField.getText()));
      svm.train(localTrain);

      // info
      classnameLabel.setText(svm.getClass().getSimpleName());
      // Count support vectors: samples with a non-zero alpha coefficient.
      double[] alphas = svm.getAlphas();
      int sv = 0;
      for (int s = 0; s < alphas.length; s++) {
        if (alphas[s] != 0) {
          sv++;
        }
      }
      svLabel.setText("" + sv);
      validate();
      // save current classifier
      model.classifier = svm;
    } else if ("smo".equalsIgnoreCase(classifierBox.getSelectedItem()
        .toString())) {
      // Same kernel-selection logic, but trains an SMO-based SVM instead.
      Kernel<double[]> k = new DoubleLinear();
      if ("GaussianL2".equalsIgnoreCase(kernelBox.getSelectedItem()
          .toString())) {
        k = new DoubleGaussL2(Double.parseDouble(kernelParamTextField
            .getText()));
      } else if ("TriangleL2".equalsIgnoreCase(kernelBox
View Full Code Here

   * @param kernel
   *            the kernel to be approximated
   */
  public NystromKernel(Kernel<T> kernel) {
    this.kernel = kernel;
    linear = new DoubleLinear();
  }
View Full Code Here

   * @param list
   */
  public static void normalizeList(List<TrainingSample<double[]>> list) {
    if(list.isEmpty())
      return;
    DoubleLinear linear = new DoubleLinear();
   
    for(TrainingSample<double[]> t : list) {
      double[] desc = t.sample;
      double norm = Math.sqrt(linear.valueOf(desc, desc));
      for(int x = 0 ; x < desc.length ; x++)
        desc[x] /= norm;
    }
   
  }
View Full Code Here

   * @param list
   */
  public static void normalizeDoubleList(List<double[]> list) {
    if(list.isEmpty())
      return;
    DoubleLinear linear = new DoubleLinear();
   
    for(double[] desc : list) {
      double norm = Math.sqrt(linear.valueOf(desc, desc));
      for(int x = 0 ; x < desc.length ; x++)
        desc[x] /= norm;
    }
   
  }
View Full Code Here

  @Test
  public final void testProjectListListOfTrainingSampleOfT() {
    // Kernel matrix of the original samples.
    double[][] m1 = k.getKernelMatrix(list);

    // Project the samples (pca is presumably a kernel-PCA — TODO confirm),
    // then rebuild the Gram matrix with a plain linear kernel: both
    // matrices should agree up to the removed mean.
    List<TrainingSample<double[]>> plist = pca.projectList(list);
    DoubleLinear lin = new DoubleLinear();
    double[][] m2 = lin.getKernelMatrix(plist);

    // Compare the upper triangle only (kernel matrices are symmetric).
    for(int i = 0 ; i < m1.length ; i++) {
      for(int j = i ; j < m1[0].length ; j++) {
        assertEquals(m1[i][j], m2[i][j]+pca.getMean(), 1e-10);
      }
View Full Code Here

TOP

Related Classes of fr.lip6.jkernelmachines.kernel.typed.DoubleGaussL2

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact software#gmail.com.