Package: org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSDataInputStream.readInt()


    offsets = new int[blocks];
    filenos = new short[blocks];

    for (int i = 0; i < blocks; i++) {
      docnos[i] = in.readInt();
      offsets[i] = in.readInt();
      filenos[i] = in.readShort();
    }

    in.close();
  }
View Full Code Here


  }

  public TTable_sliced(FileSystem fs, Path p) throws IOException {
    _fs = fs; _datapath = p;
    FSDataInputStream in = _fs.open(_datapath.suffix(Path.SEPARATOR + "metadata.bin"));
    _data = new IndexedFloatArray[in.readInt()];
    in.close();
  }

  public TTable_sliced(int e_voc_size, FileSystem fs, Path p) {
    _fs = fs; _datapath = p;
View Full Code Here

  static public String[] readDocnoData(Path p, FileSystem fs) throws IOException {
    LOG.warn("p: " + p);
    FSDataInputStream in = fs.open(p);

    // Docnos start at one, so we need an array that's one larger than number of docs.
    int sz = in.readInt() + 1;
    LOG.warn("creating array of length: " + sz);
    String[] arr = new String[sz];

    for (int i = 1; i < sz; i++) {
      arr[i] = in.readUTF();
View Full Code Here

    FSDataInputStream in = fs.open(p);

    List<Integer> ids = null;
    int lastOffset = -1;

    int sz = in.readInt();
    docids = new int[105 * 50][];
    offsets = new int[105 * 50];

    for (int i = 0; i < 105 * 50; i++) {
      offsets[i] = -1;
View Full Code Here

   */
  static public int[] readDocnoMappingData(Path p, FileSystem fs) throws IOException {
    FSDataInputStream in = fs.open(p);

    // docnos start at one, so we need an array that's one larger than number of docs.
    int sz = in.readInt() + 1;
    int[] arr = new int[sz];

    for (int i = 1; i < sz; i++) {
      arr[i] = in.readInt();
    }
View Full Code Here

    // docnos start at one, so we need an array that's one larger than number of docs.
    int sz = in.readInt() + 1;
    int[] arr = new int[sz];

    for (int i = 1; i < sz; i++) {
      arr[i] = in.readInt();
    }
    in.close();

    arr[0] = 0;
View Full Code Here

    FSDataInputStream in = fs.open(p);

    List<Integer> ids = null;
    int lastOffset = -1;

    int sz = in.readInt();
    docids = new int[273 * 100][];
    offsets = new int[273 * 100];

    for (int i = 0; i < 273 * 100; i++) {
      offsets[i] = -1;
View Full Code Here

    offsets = new int[blocks];
    fileno = new short[blocks];

    for (int i = 0; i < blocks; i++) {
      docnos[i] = in.readInt();
      offsets[i] = in.readInt();
      fileno[i] = in.readShort();

      if (i > 0 && i % 100000 == 0)
        LOG.info(i + " blocks read");
    }
View Full Code Here

    LOG.info("Reading docid mapping...");

    FSDataInputStream in = fs.open(p);
    // docnos start at one, so we need an array that's one larger than
    // number of docs
    int sz = in.readInt() + 1;
    int[] arr = new int[sz];
    int cnt = 0;
    for (int i = 1; i < sz; i++) {
      arr[i] = in.readInt();
View Full Code Here

    // number of docs
    int sz = in.readInt() + 1;
    int[] arr = new int[sz];
    int cnt = 0;
    for (int i = 1; i < sz; i++) {
      arr[i] = in.readInt();

      if (i % 500000 == 0) {
        LOG.info(i);
      }
      cnt++;
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.