Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.LongWritable
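org.apache.hadoop.io.LongWritable is Hadoop's Writable box around a primitive long, used as a key or value type in MapReduce jobs and SequenceFiles. Below is a minimal, self-contained sketch of the core API; the class name LongWritableDemo is ours, purely for illustration.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.LongWritable;

    public class LongWritableDemo {
      public static void main(String[] args) throws IOException {
        LongWritable value = new LongWritable(42L);

        // Serialize via the Writable contract: write(DataOutput).
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        value.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance via readFields(DataInput).
        LongWritable copy = new LongWritable();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.get());            // 42
        System.out.println(value.compareTo(copy)); // 0
      }
    }

The fragments below show the class in real use. In the first, a benchmark mapper emits the number of bytes it read as a LongWritable, keyed by the literal string "bytes":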


      } finally {
        in.close();
      }

      // Emit the byte count as a LongWritable under the key "bytes".
      collector.collect(new UTF8("bytes"), new LongWritable(read));

      reporter.setStatus("read " + name);
    }
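Next, a split planner in the style of DistCp's input format: it reads a SequenceFile whose keys are file lengths (see the copy-list writer further below) and packs files into splits of roughly targetsize bytes each: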


      Path src = new Path(srcfilelist);
      FileSystem fs = src.getFileSystem(job);
      FileStatus srcst = fs.getFileStatus(src);

      ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits);
      LongWritable key = new LongWritable();
      FilePair value = new FilePair();
      final long targetsize = cbsize / numSplits;
      long pos = 0L;
      long last = 0L;
      long acc = 0L;
      long cbrem = srcst.getLen();
      SequenceFile.Reader sl = null;
      try {
        sl = new SequenceFile.Reader(fs, src, job);
        for (; sl.next(key, value); last = sl.getPosition()) {
          // If adding this file's length would push the split past the
          // target size, cut the split here and start the file in a new one.
          if (acc + key.get() > targetsize && acc != 0) {
            long splitsize = last - pos;
            splits.add(new FileSplit(src, pos, splitsize, (String[])null));
            cbrem -= splitsize;
            pos = last;
            acc = 0L;
          }
          acc += key.get();
        }
      }
      finally {
        checkAndClose(sl);
      }
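From an archiving tool along the lines of Hadoop Archives: directory entries are appended to the source-list writer with a zero-length key, the entry text listing the directory's children: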

        sbuff.append(toWrite);
        for (String child : children) {
          sbuff.append(child).append(" ");
        }
        toWrite = sbuff.toString();
        // Directory entries are written with a zero-length key.
        srcWriter.append(new LongWritable(0L), new Text(toWrite));
      }
    }
  }
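The matching file case: the file's length becomes the key, and sync() writes a sync marker so the SequenceFile can later be split: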

            toWrite = sbuff.toString();
          } else {
            toWrite += relPathToRoot(stat.getPath(), parentPath) + " file ";
          }
          srcWriter.append(new LongWritable(len), new Text(toWrite));
          srcWriter.sync();
          numFiles++;
          totalSize += len;
        }
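Writing the copy list itself: each record's key is the source length (zero for directories), which is exactly what the split planner above accumulates: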

            if (!skipfile) {
              src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                  new FilePair(child, dst));
            }

            dst_writer.append(new Text(dst),
                new Text(child.getPath().toString()));
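From a storm-hdfs style SequenceFormat: a single LongWritable key is lazily created and then reused for every Trident tuple, avoiding a per-tuple allocation: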

    @Override
    public Writable key(TridentTuple tuple) {
        // Reuse a single LongWritable instance instead of allocating per tuple.
        if (this.key == null) {
            this.key = new LongWritable();
        }
        this.key.set(tuple.getLongByField(this.keyField));
        return this.key;
    }
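The core-Storm counterpart is identical except that it takes a plain Tuple: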

    @Override
    public Writable key(Tuple tuple) {
        if (this.key == null) {
            this.key = new LongWritable();
        }
        this.key.set(tuple.getLongByField(this.keyField));
        return this.key;
    }
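RecordReaders typically hand out fresh instances instead; here, a DBInputFormat-style reader creates its LongWritable keys on demand: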

      super(split, inputClass, job, conn, dbConfig, cond, fields, table);
    }

    /** {@inheritDoc} */
    public LongWritable createKey() {
      return new LongWritable();
    }
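A delegating RecordReader with the same createKey() contract: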

    public void close() throws IOException {
      rr.close();
    }

    public LongWritable createKey() {
      return new LongWritable();
    }
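Finally, a control-file writer in the style of the Hadoop filesystem benchmarks, appending a bounded random size as a LongWritable for each file name: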

        if (size < 0)
          size = -size;
        size = size % maxSize;

        // LOG.info(" adding: name=" + name + " size=" + size);

        writer.append(name, new LongWritable(size));

        totalSize += size;
      }
    } finally {
      writer.close();
    }
