Examples of LineReader


Examples of org.apache.hadoop.util.LineReader

      Text key = new Text();
      for (FileStatus file: files) {
        Path path = file.getPath();
        FileSystem fs = path.getFileSystem(job.getConfiguration());
        LineReader reader = new LineReader(fs.open(path));
        long pos = 0;
        int n;
        try {
          while ((n = reader.readLine(key)) > 0) {
            String[] hosts = getStoreDirHosts(fs, path);
            splits.add(new FileSplit(path, pos, n, hosts));
            pos += n;
          }
        } finally {
          reader.close();
        }
      }

      return splits;
    }
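
The snippet above builds FileSplits by walking a file with LineReader's readLine loop. A minimal, self-contained sketch of that same loop, assuming a Path taken from the command line and a fresh Configuration (class and variable names here are illustrative, not from the original code):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.util.LineReader;

    public class LineReaderLoop {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        Path path = new Path(args[0]);                 // file to read (local FS or HDFS)
        FileSystem fs = path.getFileSystem(conf);
        FSDataInputStream fileIn = fs.open(path);
        LineReader reader = new LineReader(fileIn, conf);
        Text line = new Text();
        try {
          int bytesRead;
          // readLine() fills 'line' and returns the number of bytes consumed,
          // including the newline; 0 means end of stream.
          while ((bytesRead = reader.readLine(line)) > 0) {
            System.out.println(line);
          }
        } finally {
          reader.close();                              // also closes the wrapped stream
        }
      }
    }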

Examples of org.apache.hadoop.util.LineReader

     
      for (FileStatus file: listStatus(job)) {
        Path path = file.getPath();
        FileSystem fs = path.getFileSystem(job.getConfiguration());
        FSDataInputStream fileIn = fs.open(path);
        LineReader in = new LineReader(fileIn, job.getConfiguration());
        int lineLen = 0;
        while(true) {
          Text lineText = new Text();
          lineLen = in.readLine(lineText);
          if (lineLen <= 0) {
            break;
          }
          Matcher m = LINE_PATTERN.matcher(lineText.toString());
          if((m != null) && m.matches()) {
            int startRow = Integer.parseInt(m.group(1));
            int rows = Integer.parseInt(m.group(2));
            int totalRows = Integer.parseInt(m.group(3));
            int clients = Integer.parseInt(m.group(4));
            int rowsPerPut = Integer.parseInt(m.group(5));

            LOG.debug("split["+ splitList.size() + "] " +
                     " startRow=" + startRow +
                     " rows=" + rows +
                     " totalRows=" + totalRows +
                     " clients=" + clients +
                     " rowsPerPut=" + rowsPerPut);

            PeInputSplit newSplit =
              new PeInputSplit(startRow, rows, totalRows, clients, rowsPerPut);
            splitList.add(newSplit);
          }
        }
        in.close();
      }
     
      LOG.info("Total # of splits: " + splitList.size());
      return splitList;
    }
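
The snippet above (and the similar one that follows) depends on a LINE_PATTERN constant that is not shown. A hypothetical pattern of the same shape, five integer capture groups matching one split description per line, could look like the sketch below; the real field is defined elsewhere in the enclosing class and may differ:

      // Hypothetical regex (requires java.util.regex.Pattern); the actual LINE_PATTERN is not shown above.
      // It would match lines such as:
      //   startRow=0, rows=1000, totalRows=100000, clients=10, rowsPerPut=100
      private static final Pattern LINE_PATTERN = Pattern.compile(
          "startRow=(\\d+),\\s+rows=(\\d+),\\s+totalRows=(\\d+)," +
          "\\s+clients=(\\d+),\\s+rowsPerPut=(\\d+)");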

Examples of org.apache.hadoop.util.LineReader

      for (FileStatus file: listStatus(job)) {
        Path path = file.getPath();
        FileSystem fs = path.getFileSystem(job.getConfiguration());
        FSDataInputStream fileIn = fs.open(path);
        LineReader in = new LineReader(fileIn, job.getConfiguration());
        int lineLen = 0;
        while(true) {
          Text lineText = new Text();
          lineLen = in.readLine(lineText);
          if (lineLen <= 0) {
            break;
          }
          Matcher m = LINE_PATTERN.matcher(lineText.toString());
          if((m != null) && m.matches()) {
            int startRow = Integer.parseInt(m.group(1));
            int rows = Integer.parseInt(m.group(2));
            int totalRows = Integer.parseInt(m.group(3));
            int clients = Integer.parseInt(m.group(4));
            boolean flushCommits = Boolean.parseBoolean(m.group(5));
            boolean writeToWAL = Boolean.parseBoolean(m.group(6));

            LOG.debug("split["+ splitList.size() + "] " +
                     " startRow=" + startRow +
                     " rows=" + rows +
                     " totalRows=" + totalRows +
                     " clients=" + clients +
                     " flushCommits=" + flushCommits +
                     " writeToWAL=" + writeToWAL);

            PeInputSplit newSplit =
              new PeInputSplit(startRow, rows, totalRows, clients,
                flushCommits, writeToWAL);
            splitList.add(newSplit);
          }
        }
        in.close();
      }

      LOG.info("Total # of splits: " + splitList.size());
      return splitList;
    }

Examples of org.apache.hadoop.util.LineReader

  private static Path workDir =
    new Path(new Path(System.getProperty("test.build.data", "/tmp")),
             "TestConcatenatedCompressedInput").makeQualified(localFs);

  private static LineReader makeStream(String str) throws IOException {
    return new LineReader(new ByteArrayInputStream(str.getBytes("UTF-8")),
                          defaultConf);
  }
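
A short usage sketch of that helper inside a test method; the input string and assertions here are illustrative, not taken from TestConcatenatedCompressedInput, and assume JUnit's assertTrue/assertEquals are available as in the surrounding test class:

      // Illustrative only: feed the helper an in-memory string and read it back line by line.
      LineReader in = makeStream("one\ntwo\nthree");
      Text out = new Text();
      assertTrue("should read the first line", in.readLine(out) > 0);
      assertEquals("first line content", "one", out.toString());
      in.close();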

Examples of org.apache.hadoop.util.LineReader

    final FileInputStream in2 = new FileInputStream(fnLocal2.toString());
    assertEquals("concat bytes available", 2734, in1.available());
    assertEquals("concat bytes available", 3413, in2.available()); // w/hdr CRC

    CompressionInputStream cin2 = gzip.createInputStream(in2);
    LineReader in = new LineReader(cin2);
    Text out = new Text();

    int numBytes, totalBytes=0, lineNum=0;
    while ((numBytes = in.readLine(out)) > 0) {
      ++lineNum;
      totalBytes += numBytes;
    }
    in.close();
    assertEquals("total uncompressed bytes in concatenated test file",
                 5346, totalBytes);
    assertEquals("total uncompressed lines in concatenated test file",
                 84, lineNum);

Examples of org.crsh.text.LineReader

          public int getMinHeight(int width) {
            throw new UnsupportedOperationException();
          }
          @Override
          public LineReader reader(final int width) {
            return new LineReader() {

              boolean done = false;

              public boolean hasLine() {
                return !done;

Examples of org.hsqldb.lib.LineReader

        } else {
            InputStream stream =
                crypto.getInputStream(new BufferedInputStream(inputStream));

            stream       = new GZIPInputStream(stream);
            dataStreamIn = new LineReader(stream, ScriptWriterText.ISO_8859_1);
        }
    }

Examples of org.jitterbit.io.LineReader

       
        private void processFile(final File originalFile) throws IOException {
            File outputFile = new File(originalFile.getParent(), originalFile.getName() + "2");
            final BufferedWriter w = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile), UTF_8));
            try {
                new LineReader(originalFile, UTF_8) {
                   
                    @Override
                    protected void processLine(String line) {
                        try {
                            String out = translateLine(line);

Examples of org.jstripe.tomcat.probe.tools.LineReader

                    ff.getLines().add(listSize, " ------------- THE FILE HAS BEEN TRUNCATED --------------");
                }

                BackwardsFileStream bfs = new BackwardsFileStream(f, currentLength);
                try {
                    LineReader br = new LineReader(bfs, true);
                    String s;
                    while (readSize < currentLength - ff.getLastKnowLength() && (s = br.readLine()) != null) {
                        if (ff.getLines().size() >= maxLines) {
                            if (listSize > 0) {
                                ff.getLines().remove(0);
                                listSize--;
                            } else {

Examples of org.tmatesoft.hg.internal.LineReader

    queueNames = Collections.emptyList();
    final LogFacility log = repo.getSessionContext().getLog();
    try {
      File queues = repo.getFileFromRepoDir("patches.queues");
      if (queues.isFile()) {
        LineReader lr = new LineReader(queues, log).trimLines(true).skipEmpty(true);
        lr.read(new LineReader.SimpleLineCollector(), queueNames = new LinkedList<String>());
      }
      final String queueLocation; // path under .hg to patch queue information (status, series and diff files)
      File activeQueueFile = repo.getFileFromRepoDir("patches.queue");
      // file is there only if it's not default queue ('patches') that is active
      if (activeQueueFile.isFile()) {
        ArrayList<String> contents = new ArrayList<String>();
        new LineReader(activeQueueFile, log).read(new LineReader.SimpleLineCollector(), contents);
        if (contents.isEmpty()) {
          log.dump(getClass(), Warn, "File %s with active queue name is empty", activeQueueFile.getName());
          activeQueue = PATCHES_DIR;
          queueLocation = PATCHES_DIR + '/';
        } else {
          activeQueue = contents.get(0);
          queueLocation = PATCHES_DIR + '-' + activeQueue +  '/';
        }
      } else {
        activeQueue = PATCHES_DIR;
        queueLocation = PATCHES_DIR + '/';
      }
      final Path.Source patchLocation = new Path.Source() {
       
        public Path path(CharSequence p) {
          StringBuilder sb = new StringBuilder(64);
          sb.append(".hg/");
          sb.append(queueLocation);
          sb.append(p);
          return Path.create(sb);
        }
      };
      final File fileStatus = repo.getFileFromRepoDir(queueLocation + "status");
      final File fileSeries = repo.getFileFromRepoDir(queueLocation + "series");
      if (fileStatus.isFile()) {
        new LineReader(fileStatus, log).read(new LineReader.LineConsumer<List<PatchRecord>>() {
 
          public boolean consume(String line, List<PatchRecord> result) throws IOException {
            int sep = line.indexOf(':');
            if (sep == -1) {
              log.dump(MqManager.class, Warn, "Bad line in %s:%s", fileStatus.getPath(), line);
              return true;
            }
            Nodeid nid = Nodeid.fromAscii(line.substring(0, sep));
            String name = new String(line.substring(sep+1));
            result.add(new PatchRecord(nid, name, patchLocation.path(name)));
            return true;
          }
        }, applied = new LinkedList<PatchRecord>());
      }
      if (fileSeries.isFile()) {
        final Map<String,PatchRecord> name2patch = new HashMap<String, PatchRecord>();
        for (PatchRecord pr : applied) {
          name2patch.put(pr.getName(), pr);
        }
        LinkedList<String> knownPatchNames = new LinkedList<String>();
        new LineReader(fileSeries, log).read(new LineReader.SimpleLineCollector(), knownPatchNames);
        // XXX read other queues?
        allKnown = new ArrayList<PatchRecord>(knownPatchNames.size());
        for (String name : knownPatchNames) {
          PatchRecord pr = name2patch.get(name);
          if (pr == null) {