Package org.encog.app.analyst.csv.basic

Examples of org.encog.app.analyst.csv.basic.LoadedRow
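
LoadedRow holds a single row read from a CSV file as an array of strings. A constructor argument can reserve extra columns at the end of the row, which callers fill in (for example with a computed prediction or a cluster number) before the row is written back out. The excerpts below are truncated windows around each LoadedRow usage, so some begin or end mid-statement.

A minimal sketch of the basic pattern, assuming a placeholder input file "iris.csv" with a header row; the ReadCSV, CSVFormat, and LoadedRow calls mirror the ones used in the excerpts:

import org.encog.app.analyst.csv.basic.LoadedRow;
import org.encog.util.csv.CSVFormat;
import org.encog.util.csv.ReadCSV;

public class LoadedRowDemo {
  public static void main(String[] args) {
    // Read a CSV file that has a header row, using the English number format.
    ReadCSV csv = new ReadCSV("iris.csv", true, CSVFormat.ENGLISH);
    while (csv.next()) {
      // Copy the current row into a LoadedRow, reserving one extra column.
      LoadedRow row = new LoadedRow(csv, 1);
      String[] data = row.getData();
      // Fill the appended column before the row is written or displayed.
      data[data.length - 1] = "processed";
      System.out.println(String.join(",", data));
    }
    csv.close();
  }
}

Example: an evaluation loop that reads each CSV row into a LoadedRow, builds the normalized input for a trained method (classification or regression), and writes the de-normalized prediction into the row's extra output columns.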


        .getScript().getNormalize().countActiveFields() - 1, 1);

    resetStatus();
    while (csv.next()) {
      updateStatus(false);
      final LoadedRow row = new LoadedRow(csv, this.outputColumns);

      double[] inputArray = AnalystNormalizeCSV.extractFields(analyst,
          this.analystHeaders, csv, outputLength, false);
      if (this.series.getTotalDepth() > 1) {
        inputArray = this.series.process(inputArray);
      }

      if (inputArray != null) {
        final MLData input = new BasicMLData(inputArray);

        // evaluation data
        if ((method instanceof MLClassification)
            && !(method instanceof MLRegression)) {
          // classification only?
          output = new BasicMLData(1);
          output.setData(0,
              ((MLClassification) method).classify(input));
        } else {
          // regression
          output = ((MLRegression) method).compute(input);
        }

        // skip file data
        int index = this.fileColumns;
        int outputIndex = 0;

        // display output
        for (final AnalystField field : analyst.getScript()
            .getNormalize().getNormalizedFields()) {
          if (this.analystHeaders.find(field.getName()) != -1) {

            if (field.isOutput()) {
              if (field.isClassify()) {
                // classification
                final ClassItem cls = field.determineClass(
                    outputIndex, output.getData());
                outputIndex += field.getColumnsNeeded();
                if (cls == null) {
                  row.getData()[index++] = "?Unknown?";
                } else {
                  row.getData()[index++] = cls.getName();
                }
              } else {
                // regression
                double n = output.getData(outputIndex++);
                n = field.deNormalize(n);
                row.getData()[index++] = getInputFormat()
                    .format(n, getPrecision());
              }
            }
          }
        }
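
Example: the same evaluation loop in a variant that also treats a BayesianNetwork's classification target event as an output field, so its prediction is written into the row even when the field is not flagged as output.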


        .getScript().getNormalize().countActiveFields() - 1, 1);

    resetStatus();
    while (csv.next()) {
      updateStatus(false);
      final LoadedRow row = new LoadedRow(csv, this.outputColumns);

      double[] inputArray = AnalystNormalizeCSV.extractFields(analyst,
          this.analystHeaders, csv, outputLength, true);
      if (this.series.getTotalDepth() > 1) {
        inputArray = this.series.process(inputArray);
      }

      if (inputArray != null) {
        final MLData input = new BasicMLData(inputArray);

        // evaluation data
        if ((method instanceof MLClassification)
            && !(method instanceof MLRegression)) {
          // classification only?
          output = new BasicMLData(1);
          output.setData(0,
              ((MLClassification) method).classify(input));
        } else {
          // regression
          output = ((MLRegression) method).compute(input);
        }

        // skip file data
        int index = this.fileColumns;
        int outputIndex = 0;
       
        String otherOutput = "";
        if( method instanceof BayesianNetwork ) {
          otherOutput = ((BayesianNetwork)method).getClassificationTargetEvent().getLabel();
        }

        // display output
        for (final AnalystField field : analyst.getScript()
            .getNormalize().getNormalizedFields()) {
          if (this.analystHeaders.find(field.getName()) != -1) {

            if (field.isOutput() || field.getName().equals(otherOutput)) {
              if (field.isClassify()) {
                // classification
                final ClassItem cls = field.determineClass(
                    outputIndex, output.getData());
                outputIndex += field.getColumnsNeeded();
                if (cls == null) {
                  row.getData()[index++] = "?Unknown?";
                } else {
                  row.getData()[index++] = cls.getName();
                }
              } else {
                // regression
                double n = output.getData(outputIndex++);
                n = field.deNormalize(n);
                row.getData()[index++] = getFormat()
                    .format(n, getPrecision());
              }
            }
          }
        }
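
Example: raw evaluation of a trained regression method, where the input values are parsed directly from the LoadedRow, the ideal columns are skipped, and the computed outputs are formatted into the columns that follow before the row is written.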

    final PrintWriter tw = analystPrepareOutputFile(outputFile);

    resetStatus();
    while (csv.next()) {
      updateStatus(false);
      final LoadedRow row = new LoadedRow(csv, this.idealCount);

      int dataIndex = 0;
      // load the input data
      for (int i = 0; i < this.inputCount; i++) {
        final String str = row.getData()[i];
        final double d = getFormat().parse(str);
        input.setData(i, d);
        dataIndex++;
      }

      // do we need to skip the ideal values?
      dataIndex += this.idealCount;

      // compute the result
      output = method.compute(input);

      // display the computed result
      for (int i = 0; i < this.outputCount; i++) {
        final double d = output.getData(i);
        row.getData()[dataIndex++] = getFormat().format(d,
            getPrecision());
      }

      writeRow(tw, row);
    }
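
Example: splitting a CSV file, where each row is wrapped in a LoadedRow and copied to a target file until that target's remaining-row count reaches zero.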

      final PrintWriter tw = prepareOutputFile(target.getFilename());

      while ((target.getNumberRemaining() > 0) && csv.next()
          && !shouldStop()) {
        updateStatus(false);
        final LoadedRow row = new LoadedRow(csv);
        writeRow(tw, row);
        target.setNumberRemaining(target.getNumberRemaining() - 1);
      }

      tw.close();
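
Example: writing clustering results, where each clustered data item is turned into a LoadedRow with one extra column that receives the cluster number.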

    int clusterNum = 0;
    for (final MLCluster cl : cluster.getClusters()) {
      for (final MLData item : cl.getData()) {
        final int clsIndex = item.size();
        final LoadedRow lr = new LoadedRow(this.getFormat(),item.getData(),1);
        lr.getData()[clsIndex] = "" + clusterNum;
        writeRow(tw, lr);
      }
      clusterNum++;
    }
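
Example: counting how often each value of a target field occurs, using a LoadedRow to pull that field out of every CSV row.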

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getFormat());

    resetStatus();
    while (csv.next() && !shouldStop()) {
      final LoadedRow row = new LoadedRow(csv);
      updateStatus(false);
      final String key = row.getData()[targetField];
      int count;
      if (!this.counts.containsKey(key)) {
        count = 0;
      } else {
        count = this.counts.get(key);
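
Example: filtering a CSV file, where only the rows that pass shouldProcess are written to the output and counted.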

    this.filteredCount = 0;

    resetStatus();
    while (csv.next() && !shouldStop()) {
      updateStatus(false);
      final LoadedRow row = new LoadedRow(csv);
      if (shouldProcess(row)) {
        writeRow(tw, row);
        this.filteredCount++;
      }
    }
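
Example: drawing a random remaining row from a buffer of LoadedRow objects (a row-shuffling pattern), clearing each slot as it is handed back.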

    }

    while (this.remaining > 0) {
      final int index = RangeRandomizer.randomInt(0, this.bufferSize - 1);
      if (this.buffer[index] != null) {
        final LoadedRow result = this.buffer[index];
        this.buffer[index] = null;
        this.remaining--;
        return result;
      }
    }
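
Example: refilling that buffer by reading rows from the CSV file until the buffer is full, the file ends, or a stop is requested.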

      this.buffer[i] = null;
    }

    int index = 0;
    while (csv.next() && (index < this.bufferSize) && !shouldStop()) {
      final LoadedRow row = new LoadedRow(csv);
      this.buffer[index++] = row;
    }

    this.remaining = index;
  }
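
Example: a process method that opens the input CSV, prepares the output file, and repeatedly writes the row returned by getNextRow, presumably the buffered random draw shown above, until none remain.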

  public void process(final File outputFile) {
    validateAnalyzed();

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getFormat());
    LoadedRow row;

    final PrintWriter tw = prepareOutputFile(outputFile);

    resetStatus();
    while ((row = getNextRow(csv)) != null) {


