Package org.encog.util.csv

Examples of org.encog.util.csv.ReadCSV
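A quick orientation before the excerpts: ReadCSV is constructed with a file name, a flag saying whether the first row contains headers, and a CSVFormat; rows are pulled with next(), columns are read with get(index), and the reader is released with close(). The minimal sketch below shows only that loop; the file name "data.csv" is a placeholder and CSVFormat.ENGLISH is assumed as the decimal-point format.

import org.encog.util.csv.CSVFormat;
import org.encog.util.csv.ReadCSV;

public class ReadCSVSketch {
  public static void main(final String[] args) {
    // "data.csv" is a placeholder; true = the first row holds column names
    final ReadCSV csv = new ReadCSV("data.csv", true, CSVFormat.ENGLISH);
    try {
      while (csv.next()) {
        // print every column of the current row
        for (int i = 0; i < csv.getColumnCount(); i++) {
          System.out.print(csv.get(i) + " ");
        }
        System.out.println();
      }
    } finally {
      csv.close();
    }
  }
}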


      this.outputFormat = this.inputFormat;
    }

    resetStatus();
    int rc = 0;
    final ReadCSV csv = new ReadCSV(this.inputFilename.toString(),
        this.expectInputHeaders, this.inputFormat);
    while (csv.next() && !this.cancel) {
      updateStatus(true);
      rc++;
    }
    this.recordCount = rc;
    this.columnCount = csv.getColumnCount();

    readHeaders(csv);
    csv.close();
    reportDone(true);
  }
View Full Code Here


  /**
   * Read the CSV file.
   */
  private void readFile() {
    ReadCSV csv = null;

    try {
      csv = new ReadCSV(getInputFilename().toString(),
          isExpectInputHeaders(), getInputFormat());

      resetStatus();
      int row = 0;
      while (csv.next() && !shouldStop()) {
        updateStatus("Reading data");
        for (final BaseCachedColumn column : getColumns()) {
          if (column instanceof FileData) {
            if (column.isInput()) {
              final FileData fd = (FileData) column;
              final String str = csv.get(fd.getIndex());
              final double d = getInputFormat().parse(str);
              fd.getData()[row] = d;
            }
          }
        }
        row++;
      }
    } finally {
      reportDone("Reading data");
      if (csv != null) {
        csv.close();
      }
    }
  }
View Full Code Here

   * @param target The Encog analyst object to analyze.
   */
  public final void process(final EncogAnalyst target) {
    final CSVFormat csvFormat = ConvertStringConst
        .convertToCSVFormat(this.format);
    ReadCSV csv = new ReadCSV(this.filename, this.headers, csvFormat);

    // pass one, calculate the min/max
    while (csv.next()) {
      if (this.fields == null) {
        generateFields(csv);
      }

      for (int i = 0; i < csv.getColumnCount(); i++) {
        this.fields[i].analyze1(csv.get(i));
      }
    }

    for (final AnalyzedField field : this.fields) {
      field.completePass1();
    }

    csv.close();

    // pass two, standard deviation
    csv = new ReadCSV(this.filename, this.headers, csvFormat);
    while (csv.next()) {
      for (int i = 0; i < csv.getColumnCount(); i++) {
        this.fields[i].analyze2(csv.get(i));
      }
    }

    for (final AnalyzedField field : this.fields) {
      field.completePass2();
    }

    csv.close();

    String str = this.script.getProperties().getPropertyString(
        ScriptProperties.SETUP_CONFIG_ALLOWED_CLASSES);
    if (str == null) {
      str = "";
View Full Code Here
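The two passes in the excerpt above exist because the standard deviation depends on the mean: the first scan can gather min/max and running totals, but the squared deviations can only be accumulated once the mean is known, hence the second ReadCSV over the same file. The self-contained sketch below illustrates the same two-pass idea over a plain double array; it is not Encog's AnalyzedField implementation.

public class TwoPassStdDev {
  public static void main(final String[] args) {
    final double[] values = {2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0};

    // pass one: accumulate the sum to obtain the mean
    double sum = 0;
    for (final double v : values) {
      sum += v;
    }
    final double mean = sum / values.length;

    // pass two: accumulate squared deviations from that mean
    double sumSq = 0;
    for (final double v : values) {
      sumSq += (v - mean) * (v - mean);
    }
    final double stdDev = Math.sqrt(sumSq / values.length);

    // prints mean=5.0, stdDev=2.0 (population standard deviation)
    System.out.println("mean=" + mean + ", stdDev=" + stdDev);
  }
}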

   * @param method The method to use.
   */
  public final void process(final File outputFile,
      final MLMethod method) {

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getInputFormat());

    MLData output = null;

    final int outputLength = this.analyst.determineUniqueColumns();

    final PrintWriter tw = this.prepareOutputFile(outputFile, this.analyst
        .getScript().getNormalize().countActiveFields() - 1, 1);

    resetStatus();
    while (csv.next()) {
      updateStatus(false);
      final LoadedRow row = new LoadedRow(csv, this.outputColumns);

      double[] inputArray = AnalystNormalizeCSV.extractFields(analyst,
          this.analystHeaders, csv, outputLength, false);
      if (this.series.getTotalDepth() > 1) {
        inputArray = this.series.process(inputArray);
      }

      if (inputArray != null) {
        final MLData input = new BasicMLData(inputArray);

        // evaluation data
        if ((method instanceof MLClassification)
            && !(method instanceof MLRegression)) {
          // classification only: the method yields a single class index
          output = new BasicMLData(1);
          output.setData(0,
              ((MLClassification) method).classify(input));
        } else {
          // regression
          output = ((MLRegression) method).compute(input);
        }

        // skip file data
        int index = this.fileColumns;
        int outputIndex = 0;

        // display output
        for (final AnalystField field : analyst.getScript()
            .getNormalize().getNormalizedFields()) {
          if (this.analystHeaders.find(field.getName()) != -1) {

            if (field.isOutput()) {
              if (field.isClassify()) {
                // classification
                final ClassItem cls = field.determineClass(
                    outputIndex, output.getData());
                outputIndex += field.getColumnsNeeded();
                if (cls == null) {
                  row.getData()[index++] = "?Unknown?";
                } else {
                  row.getData()[index++] = cls.getName();
                }
              } else {
                // regression
                double n = output.getData(outputIndex++);
                n = field.deNormalize(n);
                row.getData()[index++] = getInputFormat()
                    .format(n, getPrecision());
              }
            }
          }
        }
      }

      writeRow(tw, row);
    }
    reportDone(false);
    tw.close();
    csv.close();
  }
View Full Code Here

   */
  public void process(final EncogAnalyst target) {
    int count = 0;
    final CSVFormat csvFormat = ConvertStringConst
        .convertToCSVFormat(this.format);
    ReadCSV csv = new ReadCSV(this.filename, this.headers, csvFormat);
   
    // pass one, calculate the min/max
    while (csv.next()) {
      if (this.fields == null) {
        generateFields(csv);
      }

      for (int i = 0; i < csv.getColumnCount(); i++) {
        this.fields[i].analyze1(csv.get(i));
      }
      count++;
    }
   
    if (count == 0) {
      throw new AnalystError("Can't analyze file, it is empty.");
    }


    for (final AnalyzedField field : this.fields) {
      field.completePass1();
    }

    csv.close();

    // pass two, standard deviation
    csv = new ReadCSV(this.filename, this.headers, csvFormat);
    while (csv.next()) {
      for (int i = 0; i < csv.getColumnCount(); i++) {
        this.fields[i].analyze2(csv.get(i));
      }
    }

    for (final AnalyzedField field : this.fields) {
      field.completePass2();
    }

    csv.close();

    String str = this.script.getProperties().getPropertyString(
        ScriptProperties.SETUP_CONFIG_ALLOWED_CLASSES);
    if (str == null) {
      str = "";
View Full Code Here

     
    // read the file
    this.rowCount = 0;
    this.missingCount = 0;
   
    ReadCSV csv = new ReadCSV(sourceFile.toString(), headers, inputFormat);
    while (csv.next()) {
      rowCount++;
      if (csv.hasMissing()) {
        missingCount++;
      }
    }
    csv.close();

  }
View Full Code Here

   * @param method The method to use.
   */
  public void process(final File outputFile,
      final MLMethod method) {

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getFormat());

    MLData output = null;
   
    for (final AnalystField field : analyst.getScript().getNormalize()
        .getNormalizedFields()) {
      field.init();
    }

    final int outputLength = this.analyst.determineTotalInputFieldCount();

    final PrintWriter tw = this.prepareOutputFile(method, outputFile, this.analyst
        .getScript().getNormalize().countActiveFields() - 1, 1);

    resetStatus();
    while (csv.next()) {
      updateStatus(false);
      final LoadedRow row = new LoadedRow(csv, this.outputColumns);

      double[] inputArray = AnalystNormalizeCSV.extractFields(analyst,
          this.analystHeaders, csv, outputLength, true);
      if (this.series.getTotalDepth() > 1) {
        inputArray = this.series.process(inputArray);
      }

      if (inputArray != null) {
        final MLData input = new BasicMLData(inputArray);

        // evaluation data
        if ((method instanceof MLClassification)
            && !(method instanceof MLRegression)) {
          // classification only: the method yields a single class index
          output = new BasicMLData(1);
          output.setData(0,
              ((MLClassification) method).classify(input));
        } else {
          // regression
          output = ((MLRegression) method).compute(input);
        }

        // skip file data
        int index = this.fileColumns;
        int outputIndex = 0;
       
        String otherOutput = "";
        if( method instanceof BayesianNetwork ) {
          otherOutput = ((BayesianNetwork)method).getClassificationTargetEvent().getLabel();
        }

        // display output
        for (final AnalystField field : analyst.getScript()
            .getNormalize().getNormalizedFields()) {
          if (this.analystHeaders.find(field.getName()) != -1) {

            if (field.isOutput() || field.getName().equals(otherOutput)) {
              if (field.isClassify()) {
                // classification
                final ClassItem cls = field.determineClass(
                    outputIndex, output.getData());
                outputIndex += field.getColumnsNeeded();
                if (cls == null) {
                  row.getData()[index++] = "?Unknown?";
                } else {
                  row.getData()[index++] = cls.getName();
                }
              } else {
                // regression
                double n = output.getData(outputIndex++);
                n = field.deNormalize(n);
                row.getData()[index++] = getFormat()
                    .format(n, getPrecision());
              }
            }
          }
        }
      }

      writeRow(tw, row);
    }
    reportDone(false);
    tw.close();
    csv.close();
  }
View Full Code Here

  /**
   * Read the CSV file.
   */
  private void readFile() {
    ReadCSV csv = null;

    try {
      csv = new ReadCSV(getInputFilename().toString(),
          isExpectInputHeaders(), getFormat());

      resetStatus();
      int row = 0;
      while (csv.next() && !shouldStop()) {
        updateStatus("Reading data");
        for (final BaseCachedColumn column : getColumns()) {
          if (column instanceof FileData) {
            if (column.isInput()) {
              final FileData fd = (FileData) column;
              final String str = csv.get(fd.getIndex());
              final double d = getFormat().parse(str);
              fd.getData()[row] = d;
            }
          }
        }
        row++;
      }
    } finally {
      reportDone("Reading data");
      if (csv != null) {
        csv.close();
      }
    }
  }
View Full Code Here

   * @param method The method to use.
   */
  public void process(final File outputFile,      
      final MLRegression method) {

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getFormat());

    if (method.getInputCount() != this.inputCount) {
      throw new AnalystError("This machine learning method has "
          + method.getInputCount()
          + " inputs, however, the data has " + this.inputCount
          + " inputs.");
    }

    MLData output = null;
    final MLData input = new BasicMLData(method.getInputCount());

    final PrintWriter tw = analystPrepareOutputFile(outputFile);

    resetStatus();
    while (csv.next()) {
      updateStatus(false);
      final LoadedRow row = new LoadedRow(csv, this.idealCount);

      int dataIndex = 0;
      // load the input data
      for (int i = 0; i < this.inputCount; i++) {
        final String str = row.getData()[i];
        final double d = getFormat().parse(str);
        input.setData(i, d);
        dataIndex++;
      }

      // skip over the ideal values so the computed output lands after them
      dataIndex += this.idealCount;

      // compute the result
      output = method.compute(input);

      // display the computed result
      for (int i = 0; i < this.outputCount; i++) {
        final double d = output.getData(i);
        row.getData()[dataIndex++] = getFormat().format(d,
            getPrecision());
      }

      writeRow(tw, row);
    }
    reportDone(false);
    tw.close();
    csv.close();
  }
View Full Code Here

      setExpectInputHeaders(headers);
      setInputFormat(format);
    }

    // now analyze columns
    ReadCSV csv = null;
    try {
      csv = new ReadCSV(input.toString(), headers, format);
      if (!csv.next()) {
        throw new QuantError("File is empty");
      }

      for (int i = 0; i < csv.getColumnCount(); i++) {
        String name;

        if (headers) {
          name = attemptResolveName(csv.getColumnNames().get(i));
        } else {
          name = "Column-" + (i + 1);
        }

        // determine if it should be an input/output field

        final String str = csv.get(i);

        boolean io = false;

        try {
          Double.parseDouble(str);
          io = true;
        } catch (final NumberFormatException ex) {
          EncogLogging.log(ex);
        }

        addColumn(new FileData(name, i, io, io));
      }
    } finally {
      if (csv != null) {
        csv.close();
      }
      setAnalyzed(true);
    }
  }
View Full Code Here
