Package org.encog.util.csv

Examples of org.encog.util.csv.ReadCSV


    * @param headers False if headers are not expected in the file.
   * @param format The CSV format.
   */
  public CSVHeaders(final File filename, final boolean headers,
      final CSVFormat format) {
    ReadCSV csv = null;
    try {
      csv = new ReadCSV(filename.toString(), headers, format);
      if (csv.next()) {
        if (headers) {
          for (final String str : csv.getColumnNames()) {
            this.headerList.add(str);
          }
        } else {
          for (int i = 0; i < csv.getColumnCount(); i++) {
            this.headerList.add("field:" + (i + 1));
          }
        }
      }

      init();

    } finally {
      if (csv != null) {
        csv.close();
      }
    }
  }
View Full Code Here


    int rowSize = this.axisMapping.size();
   
    boolean regression = !this.targetField.isClass();
   
    // read the file
    ReadCSV csv = new ReadCSV(sourceFile.toString(),headers,inputFormat);
   
    while(csv.next() ) {
      double[] row = new double[rowSize];
      List<double[]> dataList;
     
      // find a list for this class
      String cls = "?";
     
      if( regression ) {
        double d = csv.getDouble(targetIndex);
        for(int i=this.series.size()-1;i>=0;i--) {
          if( d>this.regressionSeriesPoint[i] ) {
            cls = this.series.get(i);
            break;
          }
        }
      } else {
        cls = csv.get(this.targetIndex);
        cls = cls.toLowerCase();
      }
           
      if( this.data.containsKey(cls) ) {
        dataList = this.data.get(cls);
      } else {
        dataList = new ArrayList<double[]>();
        this.data.put(cls, dataList);
      }
     
      // read the data row
      int rowIndex = 0;
      for(String key: this.axis) {
        int index = this.axisMapping.get(key);
        double d = csv.getDouble(index);
        row[rowIndex++] = d;
      }
     
      // store the data row
      dataList.add(row);
    }
   
    csv.close();
  }
View Full Code Here

   * @param method The method to use.
   */
  public final void process(final File outputFile,      
      final MLRegression method) {

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getInputFormat());

    if (method.getInputCount() != this.inputCount) {
      throw new AnalystError("This machine learning method has "
          + method.getInputCount()
          + " inputs, however, the data has " + this.inputCount
          + " inputs.");
    }

    MLData output = null;
    final MLData input = new BasicMLData(method.getInputCount());

    final PrintWriter tw = analystPrepareOutputFile(outputFile);

    resetStatus();
    while (csv.next()) {
      updateStatus(false);
      final LoadedRow row = new LoadedRow(csv, this.idealCount);

      int dataIndex = 0;
      // load the input data
      for (int i = 0; i < this.inputCount; i++) {
        final String str = row.getData()[i];
        final double d = getInputFormat().parse(str);
        input.setData(i, d);
        dataIndex++;
      }

      // do we need to skip the ideal values?
      dataIndex += this.idealCount;

      // compute the result
      output = method.compute(input);

      // display the computed result
      for (int i = 0; i < this.outputCount; i++) {
        final double d = output.getData(i);
        row.getData()[dataIndex++] = getInputFormat().format(d,
            getPrecision());
      }

      writeRow(tw, row);
    }
    reportDone(false);
    tw.close();
    csv.close();
  }
View Full Code Here

    validateAnalyzed();
    final PrintWriter tw = prepareOutputFile(outputFile);

    this.counts = new HashMap<String, Integer>();

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getInputFormat());

    resetStatus();
    while (csv.next() && !shouldStop()) {
      final LoadedRow row = new LoadedRow(csv);
      updateStatus(false);
      final String key = row.getData()[targetField];
      int count;
      if (!this.counts.containsKey(key)) {
        count = 0;
      } else {
        count = this.counts.get(key);
      }

      if (count < countPer) {
        writeRow(tw, row);
        count++;
      }

      this.counts.put(key, count);
    }
    reportDone(false);
    csv.close();
    tw.close();
  }
View Full Code Here

   *            The output file.
   */
  public final void process(final File outputFile) {
    validateAnalyzed();

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getInputFormat());
    LoadedRow row;

    final PrintWriter tw = prepareOutputFile(outputFile);

    resetStatus();
    while ((row = getNextRow(csv)) != null) {
      writeRow(tw, row);
      updateStatus(false);
    }
    reportDone(false);
    tw.close();
    csv.close();
  }
View Full Code Here

    this.data = new BasicMLDataSet();
    resetStatus();
    int recordCount = 0;

    final int outputLength = this.analyst.determineUniqueColumns();
    final ReadCSV csv = new ReadCSV(this.getInputFilename().toString(),
        this.isExpectInputHeaders(), this.getInputFormat());
    readHeaders(csv);

    this.analystHeaders = new CSVHeaders(this.getInputHeadings());

    while (csv.next() && !shouldStop()) {
      updateStatus(true);

      final LoadedRow row = new LoadedRow(csv, 1);

      final double[] inputArray = AnalystNormalizeCSV.extractFields(
          analyst, this.analystHeaders, csv, outputLength, true);
      final ClusterRow input = new ClusterRow(inputArray, row);
      this.data.add(input);

      recordCount++;
    }
    setRecordCount(recordCount);
    this.setColumnCount(csv.getColumnCount());

    readHeaders(csv);
    csv.close();
    reportDone(true);
  }
View Full Code Here

      setExpectInputHeaders(headers);
      setInputFormat(format);
    }

    // now analyze columns
    ReadCSV csv = null;
    try {
      csv = new ReadCSV(input.toString(), headers, format);
      if (!csv.next()) {
        throw new QuantError("File is empty");
      }

      for (int i = 0; i < csv.getColumnCount(); i++) {
        String name;

        if (headers) {
          name = attemptResolveName(csv.getColumnNames().get(i));
        } else {
          name = "Column-" + (i + 1);
        }

        // determine if it should be an input/output field

        final String str = csv.get(i);

        boolean io = false;

        try {
          Double.parseDouble(str);
          io = true;
        } catch (final NumberFormatException ex) {
          EncogLogging.log(ex);
        }

        addColumn(new FileData(name, i, io, io));
      }
    } finally {
      csv.close();
      setAnalyzed(true);
    }
  }
View Full Code Here

    if (this.analyst == null) {
      throw new EncogError(
          "Can't normalize yet, file has not been analyzed.");
    }

    ReadCSV csv = null;
    PrintWriter tw = null;

    try {
      csv = new ReadCSV(getInputFilename().toString(),
          isExpectInputHeaders(), getInputFormat());

      tw = new PrintWriter(new FileWriter(file));

      // write headers, if needed
      if (isProduceOutputHeaders()) {
        writeHeaders(tw);
      }

      resetStatus();
      final int outputLength = this.analyst.determineUniqueColumns();

      // write file contents
      while (csv.next() && !shouldStop()) {
        updateStatus(false);

        double[] output = AnalystNormalizeCSV.extractFields(
            this.analyst, this.analystHeaders, csv, outputLength,
            false);

        if (this.series.getTotalDepth() > 1) {
          output = this.series.process(output);
        }

        if (output != null) {
          final StringBuilder line = new StringBuilder();
          NumberList.toList(getOutputFormat(), line, output);
          tw.println(line);
        }
      }
    } catch (final IOException e) {
      throw new QuantError(e);
    } finally {
      reportDone(false);
      if (csv != null) {
        try {
          csv.close();
        } catch (final Exception ex) {
          EncogLogging.log(ex);
        }
      }
View Full Code Here

      final File output,
      final CSVFormat outputFormat, final Date from, final Date to) {
    try {
      final URL url = buildURL(ticker, from, to);
      final InputStream is = url.openStream();
      final ReadCSV csv = new ReadCSV(is, true, CSVFormat.ENGLISH);

      final PrintWriter tw = new PrintWriter(new FileWriter(output));
      tw.println(
    "date,time,open price,high price,low price,"
          + "close price,volume,adjusted price");

      while (csv.next() && !shouldStop()) {
        final Date date = csv.getDate("date");
        final double adjClose = csv.getDouble("adj close");
        final double open = csv.getDouble("open");
        final double close = csv.getDouble("close");
        final double high = csv.getDouble("high");
        final double low = csv.getDouble("low");
        final double volume = csv.getDouble("volume");

        final NumberFormat df = NumberFormat.getInstance();
        df.setGroupingUsed(false);

        final StringBuilder line = new StringBuilder();
View Full Code Here

   * Read the input file.
   */
  private void readInputFile() {
    resetStatus();

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getInputFormat());
    while (csv.next() && !shouldStop()) {
      updateStatus("Reading input file");
      final LoadedRow row = new LoadedRow(csv);
      this.data.add(row);
    }

    setColumnCount(csv.getColumnCount());

    if (isExpectInputHeaders()) {
      setInputHeadings(new String[csv.getColumnNames().size()]);
      for (int i = 0; i < csv.getColumnNames().size(); i++) {
        getInputHeadings()[i] = csv.getColumnNames().get(i);
      }
    }

    csv.close();
  }
View Full Code Here

TOP

Related Classes of org.encog.util.csv.ReadCSV

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.