Package org.encog.util.csv

Examples of org.encog.util.csv.ReadCSV


      final int index) {
    double result = 0;

    if( field instanceof InputFieldCSVText ) {
      final InputFieldCSVText fieldCSV = (InputFieldCSVText) field;
      final ReadCSV csv = this.csvMap.get(field);
      String v = csv.get(fieldCSV.getOffset());
      if( !fieldCSV.getMappings().containsKey(v) ) {
        throw new NormalizationError("Undefined class value: " + v);
      } else {
        result =  fieldCSV.getMappings().get(v);
      }
    } else if (field instanceof InputFieldCSV) {
      final InputFieldCSV fieldCSV = (InputFieldCSV) field;
      final ReadCSV csv = this.csvMap.get(field);
      result = csv.getDouble(fieldCSV.getOffset());
    } else if (field instanceof InputFieldMLDataSet) {
      final InputFieldMLDataSet neuralField =
        (InputFieldMLDataSet) field;
      final MLDataFieldHolder holder = this.dataSetFieldMap
          .get(field);
View Full Code Here


    for (final InputField field : this.inputFields) {
      if (field instanceof InputFieldCSV) {
        final InputFieldCSV csvField = (InputFieldCSV) field;
        final File file = csvField.getFile();
        if (!uniqueFiles.containsKey(file)) {
          final ReadCSV csv = new ReadCSV(file.toString(), false,
              this.csvFormat);
          uniqueFiles.put(file, csv);
          this.readCSV.add(csv);
        }
        this.csvMap.put(csvField, uniqueFiles.get(file));
View Full Code Here

            File binFile, int[] input, int[] ideal,
            boolean headers)
   {

       binFile.delete();
       ReadCSV csv = new ReadCSV(csvFile.toString(), headers, format);
      
       BufferedMLDataSet buffer = new BufferedMLDataSet(binFile);
       buffer.beginLoad(input.length, ideal.length);
       while(csv.next())
       {
         BasicMLData inputData = new BasicMLData(input.length);
         BasicMLData idealData = new BasicMLData(ideal.length);
        
         // handle input data
         for(int i=0;i<input.length;i++) {
           inputData.setData(i, csv.getDouble(input[i]));
         }
        
         // handle input data
         for(int i=0;i<ideal.length;i++) {
           idealData.setData(i, csv.getDouble(ideal[i]));
         }
        
         // add to dataset
        
           buffer.add(inputData,idealData);
View Full Code Here

   * @return A NeuralDataSet that holds the contents of the CSV file.
   */
  public static MLDataSet loadCSVTOMemory(CSVFormat format,
      String filename, boolean headers, int inputSize, int idealSize) {
    MLDataSet result = new BasicMLDataSet();
    ReadCSV csv = new ReadCSV(filename, headers, format);
    while (csv.next()) {
      MLData input = null;
      MLData ideal = null;
      int index = 0;

      input = new BasicMLData(inputSize);
      for (int i = 0; i < inputSize; i++) {
        double d = csv.getDouble(index++);
        input.setData(i, d);
      }

      if (idealSize > 0) {
        ideal = new BasicMLData(idealSize);
        for (int i = 0; i < idealSize; i++) {
          double d = csv.getDouble(index++);
          ideal.setData(i, d);
        }
      }

      MLDataPair pair = new BasicMLDataPair(input, ideal);
View Full Code Here

    * @param headers False if headers are not extended.
   * @param format The CSV format.
   */
  public CSVHeaders(final File filename, final boolean headers,
      final CSVFormat format) {
    ReadCSV csv = null;
    try {
      csv = new ReadCSV(filename.toString(), headers, format);
      if (csv.next()) {
        if (headers) {
          for (final String str : csv.getColumnNames()) {
            this.headerList.add(str);
          }
        } else {
          for (int i = 0; i < csv.getColumnCount(); i++) {
            this.headerList.add("field:" + (i + 1));
          }
        }
      }

      init();

    } finally {
      if (csv != null) {
        csv.close();
      }
    }
  }
View Full Code Here

    final CSVFormat format = this.analyst.getScript().determineFormat();

    CSVHeaders analystHeaders = new CSVHeaders(file, headers,
        format);
   
    ReadCSV csv = new ReadCSV(file.toString(), headers, format);
   
    for (final AnalystField field : analyst.getScript().getNormalize()
        .getNormalizedFields()) {
      field.init();
    }

    TimeSeriesUtil series = new TimeSeriesUtil(analyst,true,
        analystHeaders.getHeaders());
   

    try {
      // write file contents
      while (csv.next()) {

        double[] output = AnalystNormalizeCSV.extractFields(
            this.analyst, analystHeaders, csv, totalCount,
            false);

        if (series.getTotalDepth() > 1) {
          output = series.process(output);
        }

        MLDataPair pair = BasicMLDataPair.createPair(inputCount,outputCount);
        for(int i=0;i<inputCount;i++) {
          pair.getInput().setData(i, output[i]);
        }
        for(int i=0;i<outputCount;i++) {
          pair.getIdeal().setData(i, output[i+inputCount]);
        }
        result.add(pair);
      }
      return result;
    } finally {
      if (csv != null) {
        try {
          csv.close();
        } catch (final Exception ex) {
          EncogLogging.log(ex);
        }
      }
    }
View Full Code Here

   * Process the input file and segregate into the output files.
   */
  public void process() {
    validate();

    final ReadCSV csv = new ReadCSV(getInputFilename().toString(),
        isExpectInputHeaders(), getFormat());
    resetStatus();
    for (final SegregateTargetPercent target : this.targets) {
      final PrintWriter tw = prepareOutputFile(target.getFilename());

      while ((target.getNumberRemaining() > 0) && csv.next()
          && !shouldStop()) {
        updateStatus(false);
        final LoadedRow row = new LoadedRow(csv);
        writeRow(tw, row);
        target.setNumberRemaining(target.getNumberRemaining() - 1);
      }

      tw.close();
    }
    reportDone(false);
    csv.close();
  }
View Full Code Here

    this.data = new BasicMLDataSet();
    resetStatus();
    int recordCount = 0;

    final int outputLength = this.analyst.determineTotalColumns();
    final ReadCSV csv = new ReadCSV(this.getInputFilename().toString(),
        this.isExpectInputHeaders(), this.getFormat());
    readHeaders(csv);

    this.analystHeaders = new CSVHeaders(this.getInputHeadings());

    while (csv.next() && !shouldStop()) {
      updateStatus(true);
      final double[] inputArray = AnalystNormalizeCSV.extractFields(
          analyst, this.analystHeaders, csv, outputLength, true);
      final MLData input = new BasicMLData(inputArray);
      this.data.add(new BasicMLDataPair(input));

      recordCount++;
    }
    setRecordCount(recordCount);
    this.setColumnCount(csv.getColumnCount());

    readHeaders(csv);
    csv.close();
    reportDone(true);
  }
View Full Code Here

   */
  public void performBasicCounts() {

    resetStatus();
    int rc = 0;
    final ReadCSV csv = new ReadCSV(this.inputFilename.toString(),
        this.expectInputHeaders, this.format);
    while (csv.next() && !this.cancel) {
      updateStatus(true);
      rc++;
    }
    this.recordCount = rc;
    this.columnCount = csv.getColumnCount();

    readHeaders(csv);
    csv.close();
    reportDone(true);
  }
View Full Code Here

    if (this.analyst == null) {
      throw new EncogError(
          "Can't normalize yet, file has not been analyzed.");
    }

    ReadCSV csv = null;
    PrintWriter tw = null;

    try {
      csv = new ReadCSV(getInputFilename().toString(),
          isExpectInputHeaders(), getFormat());

      tw = new PrintWriter(new FileWriter(file));

      // write headers, if needed
      if (isProduceOutputHeaders()) {
        writeHeaders(tw);
      }

      resetStatus();
      final int outputLength = this.analyst.determineTotalColumns();

      // write file contents
      while (csv.next() && !shouldStop()) {
        updateStatus(false);

        double[] output = AnalystNormalizeCSV.extractFields(
            this.analyst, this.analystHeaders, csv, outputLength,
            false);

        if (this.series.getTotalDepth() > 1) {
          output = this.series.process(output);
        }

        if (output != null) {
          final StringBuilder line = new StringBuilder();
          NumberList.toList(getFormat(), line, output);
          tw.println(line);
        }
      }
    } catch (final IOException e) {
      throw new QuantError(e);
    } finally {
      reportDone(false);
      if (csv != null) {
        try {
          csv.close();
        } catch (final Exception ex) {
          EncogLogging.log(ex);
        }
      }
View Full Code Here

TOP

Related Classes of org.encog.util.csv.ReadCSV

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle, Inc. Contact coftware#gmail.com.