Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSDataInputStream.readInt()

FSDataInputStream extends java.io.DataInputStream, so readInt() reads the next four bytes of the stream as a big-endian int. The recurring pattern in the examples below is reading a count or length prefix with readInt() before reading the payload it describes.


Loading a dictionary from HDFS: each serialized structure is stored as a readInt() length prefix followed by that many bytes, which are then deserialized through an ObjectInputStream over the buffer.

    FSDataInputStream in = fs.open(path);

    byte[] bytes;
    ObjectInputStream obj;

    bytes = new byte[in.readInt()];
    LOG.info("Loading front-coded list of terms: " + bytes.length + " bytes.");
    in.readFully(bytes);
    obj = new ObjectInputStream(new ByteArrayInputStream(bytes));
    try {
      stringList = (FrontCodedStringList) obj.readObject();
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
    obj.close();

    bytes = new byte[in.readInt()];
    LOG.info("Loading dictionary hash: " + bytes.length + " bytes.");
    in.readFully(bytes);
    obj = new ObjectInputStream(new ByteArrayInputStream(bytes));
    try {
      stringHash = (ShiftAddXorSignedStringMap) obj.readObject();
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
    obj.close();

    int n = in.readInt();
    for (int i = 0; i < n; i++) {
      int id = in.readInt();
      intList.add(id);
      int2PosMap.put(id, i);
    }
    LOG.info("Finished loading.");
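
The writer producing this layout is not shown on this page; a minimal sketch of the producing side, assuming the same length-prefix convention (hypothetical helper, not the original source), is:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.ObjectOutputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;

    // Hypothetical writer sketch: serialize an object to a byte array, then
    // write an int length prefix followed by the raw bytes, mirroring the
    // readInt()/readFully() pairs in the loader above.
    static void writeSerialized(FSDataOutputStream out, Object o) throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      ObjectOutputStream oos = new ObjectOutputStream(baos);
      oos.writeObject(o);
      oos.close();
      out.writeInt(baos.size());  // length prefix, consumed by in.readInt()
      baos.writeTo(out);          // payload, consumed by in.readFully(bytes)
    }

The trailing id list would then be written as out.writeInt(n) followed by n calls to out.writeInt(id).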

Loading a table of per-document byte offsets and lengths: two readUTF() header fields, then a readInt() document count, then one (readLong, readInt) pair per document.

    // Read and throw away.
    in.readUTF();
    collectionPath = in.readUTF();

    // Docnos start at one, so we need an array that's one larger than number of docs.
    int sz = in.readInt() + 1;
    offsets = new long[sz];
    lengths = new int[sz];

    for (int i = 1; i < sz; i++) {
      offsets[i] = in.readLong();
      lengths[i] = in.readInt();
    }
    in.close();

    input = fs.open(new Path(collectionPath));
    docnoMapping.loadMapping(mapping, fs);
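
Once loaded, the two arrays allow random access into the collection file; a minimal sketch of a fetch-by-docno accessor (hypothetical method, assuming the input, offsets, and lengths fields above) is:

    // Hypothetical accessor: reads one document's raw bytes using the
    // positioned read that FSDataInputStream provides via PositionedReadable.
    byte[] fetchDocument(int docno) throws IOException {
      byte[] buf = new byte[lengths[docno]];
      input.readFully(offsets[docno], buf);  // read lengths[docno] bytes at offsets[docno]
      return buf;
    }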

A near-identical loader from another class, with the collection location stored in path:

    // Read and throw away.
    in.readUTF();
    path = in.readUTF();

    // Docnos start at one, so we need an array that's one larger than number of docs.
    int sz = in.readInt() + 1;
    offsets = new long[sz];
    lengths = new int[sz];

    for (int i = 1; i < sz; i++) {
      offsets[i] = in.readLong();
      lengths[i] = in.readInt();
    }
    in.close();

    input = fs.open(new Path(path));
    docnoMapping.loadMapping(mapping, fs);

Loading a docno-to-docid mapping: a readInt() count followed by one readUTF() string per document.

    Preconditions.checkNotNull(fs);

    FSDataInputStream in = fs.open(p);

    // The docnos start at one, so we need an array that's one larger than number of docs.
    int sz = in.readInt() + 1;
    String[] arr = new String[sz];

    for (int i = 1; i < sz; i++) {
      arr[i] = in.readUTF();
    }
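
The file format implied here is just an int count followed by one UTF entry per document; a minimal writer sketch (hypothetical, assuming docnos index the array from one) is:

    // Hypothetical writer sketch for the docno-to-docid table above.
    static void writeDocids(FSDataOutputStream out, String[] docids) throws IOException {
      out.writeInt(docids.length - 1);  // document count, consumed by in.readInt()
      for (int i = 1; i < docids.length; i++) {
        out.writeUTF(docids[i]);        // consumed by in.readUTF()
      }
    }

Note that writeUTF()/readUTF() use modified UTF-8 with a two-byte length, so each entry is limited to 65,535 encoded bytes.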

From a unit test over a serialized decision forest: readInt() verifies each partition's size before the (key, tree) records are read back (the loop body is truncated in this excerpt).

    FSDataInputStream in = fs.open(forestPath);

    try {
      // partitions' sizes
      for (int p = 0; p < nbMappers; p++) {
        assertEquals(splits[p].length, in.readInt());
      }

      // load (key, tree)
      TreeID key = new TreeID();
      for (int index = 0; index < nbTrees; index++) {
        // ... (loop body truncated in this excerpt)
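
The elided loop presumably restores each record with Writable.readFields(); a generic sketch of reading a count-prefixed run of Writables (not the actual Mahout test code) is:

    // Generic sketch: a readInt() count prefix followed by that many Writable
    // records, each restored with readFields(). FSDataInputStream works here
    // because it extends DataInputStream, which implements DataInput.
    static <W extends org.apache.hadoop.io.Writable> java.util.List<W> readAll(
        FSDataInputStream in, java.util.function.Supplier<W> factory) throws IOException {
      int n = in.readInt();
      java.util.List<W> records = new java.util.ArrayList<>(n);
      for (int i = 0; i < n; i++) {
        W w = factory.get();
        w.readFields(in);
        records.add(w);
      }
      return records;
    }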