Package edu.umd.cloud9.io.map

Examples of edu.umd.cloud9.io.map.HMapIDW


          "-output_path=" + indexRootPath + "/wiki-docid-tmp",
          "-output_file=" + mappingFile.toString(),
          "-wiki_language=" + collectionLang };
      LOG.info("Running BuildWikipediaDocnoMapping with args " + Arrays.toString(arr));

      BuildWikipediaDocnoMapping tool = new BuildWikipediaDocnoMapping();
      tool.setConf(conf);
      tool.run(arr);

      fs.delete(new Path(indexRootPath + "/wiki-docid-tmp"), true);
    } else {
      LOG.info("Docno mapping already exists at: " + mappingFile);
    }

    // Repack Wikipedia into sequential compressed block
    if (!fs.exists(new Path(seqCollection + "/part-00000"))) {
      LOG.info(seqCollection + " doesn't exist, creating...");
      String[] arr = new String[] { "-input=" + rawCollection,
          "-output=" + seqCollection,
          "-mapping_file=" + mappingFile.toString(),
          "-compression_type=block",
          "-wiki_language=" + collectionLang };
      LOG.info("Running RepackWikipedia with args " + Arrays.toString(arr));

      RepackWikipedia tool = new RepackWikipedia();
      tool.setConf(conf);
      tool.run(arr);
    } else {
      LOG.info("Repacked collection already exists at: " + seqCollection);     
    }

    conf.set(Constants.CollectionName, "Wikipedia-"+collectionLang);
View Full Code Here


        "-mapping_file=" + mappingFile.toString(),
        "-compression_type=block",
        "-wiki_language=" + collectionLang };
    LOG.info("Running RepackWikipedia with args " + Arrays.toString(arr));

    RepackWikipedia tool = new RepackWikipedia();
    tool.setConf(conf);
    tool.run(arr);

    conf.set(Constants.CollectionName, "Wikipedia-"+collectionLang);
    conf.setInt(Constants.NumMapTasks, numMappers);
    conf.setInt(Constants.NumReduceTasks, numReducers);
    conf.set(Constants.CollectionPath, seqCollection);
View Full Code Here

    // Repack Wikipedia into sequential compressed block
    p = new Path(seqCollection);
    if (!fs.exists(p)) {
      LOG.info(seqCollection + " doesn't exist, creating...");
      String[] arr = new String[] { rawCollection, seqCollection, mappingFile.toString(), "block"};
      RepackWikipedia tool = new RepackWikipedia();
      tool.setConf(conf);
      tool.run(arr);
    }

    conf.set("Ivory.CollectionName", "Wikipedia-"+collectionLang);
    conf.setInt("Ivory.NumMapTasks", numMappers);
    conf.setInt("Ivory.NumReduceTasks", numReducers);
View Full Code Here

          "-mapping_file=" + mappingFile.toString(),
          "-compression_type=block",
          "-wiki_language=" + collectionLang };
      LOG.info("Running RepackWikipedia with args " + Arrays.toString(arr));

      RepackWikipedia tool = new RepackWikipedia();
      tool.setConf(conf);
      tool.run(arr);
    } else {
      LOG.info("Repacked collection already exists at: " + seqCollection);     
    }

    conf.set(Constants.CollectionName, "Wikipedia-"+collectionLang);
View Full Code Here

        if (fileStats[i].getPath().getName().startsWith("_")) {
          continue;
        }

        LOG.info("processing " + fileStats[i].getPath());
        FSLineReader reader = new FSLineReader(fileStats[i].getPath(), fs);

        Text line = new Text();
        while (reader.readLine(line) > 0) {
          String[] arr = line.toString().split("\\t+", 2);

          int docno = Integer.parseInt(arr[0]);
          int len = Integer.parseInt(arr[1]);

          // Note that because of speculative execution there may be
          // multiple copies of doclength data. Therefore, we can't
          // just count number of doclengths read. Instead, keep track
          // of largest docno encountered.
          if (docno < docnoOffset) {
            throw new RuntimeException(
                "Error: docno " + docno + " < docnoOffset " + docnoOffset + "!");
          }

          doclengths[docno - docnoOffset] = len;

          if (docno > maxDocno) {
            maxDocno = docno;
          }
          if (docno < minDocno) {
            minDocno = docno;
          }
        }
        reader.close();
        context.getCounter(DocLengths.Files).increment(1);
      }

      LOG.info("min docno: " + minDocno);
      LOG.info("max docno: " + maxDocno);
View Full Code Here

  }

  @Test
  public void testSerializeLazy2() throws IOException {
    HMapIDW.setLazyDecodeFlag(true);
    HMapIDW m1 = new HMapIDW();

    m1.put(3, 5.0);
    m1.put(4, 22.0);

    byte[] bytes = m1.serialize();
    HMapIDW m2 = HMapIDW.create(bytes);

    assertFalse(m2.isDecoded());
    assertEquals(2, m2.size());

    int[] keys = m2.getKeys();
    double[] values = m2.getValues();

    assertTrue(keys[0] == 3);
    assertTrue(keys[1] == 4);

    assertTrue(values[0] == 5.0);
    assertTrue(values[1] == 22.0);

    m2.decode();
    double value;
    assertEquals(m2.size(), 2);

    value = m2.get(3);
    assertTrue(value == 5.0);

    value = m2.remove(3);
    assertEquals(m2.size(), 1);

    value = m2.get(4);
    assertTrue(value == 22.0);
  }
View Full Code Here

  @Test
  public void testLazyPlus1() throws IOException {
    HMapIDW.setLazyDecodeFlag(true);

    HMapIDW m1 = new HMapIDW();
    m1.put(3, 5.0);
    m1.put(4, 22.0);

    byte[] bytes1 = m1.serialize();

    HMapIDW m2 = new HMapIDW();
    m2.put(3, 1.0);
    m2.put(4, 1.0);
    m2.put(5, 1.0);

    byte[] bytes2 = m2.serialize();

    HMapIDW n1 = HMapIDW.create(bytes1);
    HMapIDW n2 = HMapIDW.create(bytes2);

    assertFalse(n1.isDecoded());
    assertEquals(2, n1.size());

    assertFalse(n2.isDecoded());
    assertEquals(3, n2.size());

    // n1 isn't decoded, n2 isn't decoded
    n1.plus(n2);

    assertTrue(n1.size() == 3);
    assertTrue(n1.get(3) == 6.0);
    assertTrue(n1.get(4) == 23.0);
    assertTrue(n1.get(5) == 1.0);
    assertTrue(n1.isDecoded());
    assertFalse(n2.isDecoded());
  }
View Full Code Here

  @Test
  public void testLazyPlus2() throws IOException {
    HMapIDW.setLazyDecodeFlag(true);

    HMapIDW m1 = new HMapIDW();
    m1.put(3, 5.0);
    m1.put(4, 22.0);

    byte[] bytes1 = m1.serialize();

    HMapIDW m2 = new HMapIDW();
    m2.put(3, 1.0);
    m2.put(4, 1.0);
    m2.put(5, 1.0);

    byte[] bytes2 = m2.serialize();

    HMapIDW n1 = HMapIDW.create(bytes1);
    HMapIDW n2 = HMapIDW.create(bytes2);

    assertFalse(n1.isDecoded());
    assertEquals(2, n1.size());

    assertFalse(n2.isDecoded());
    assertEquals(3, n2.size());

    // n1 isn't decoded, n2 is
    n2.decode();
    n1.plus(n2);

    assertTrue(n1.size() == 3);
    assertTrue(n1.get(3) == 6.0);
    assertTrue(n1.get(4) == 23.0);
    assertTrue(n1.get(5) == 1.0);
    assertTrue(n1.isDecoded());
    assertTrue(n2.isDecoded());
  }
View Full Code Here

  @Test
  public void testLazyPlus3() throws IOException {
    HMapIDW.setLazyDecodeFlag(true);

    HMapIDW m1 = new HMapIDW();
    m1.put(3, 5.0);
    m1.put(4, 22.0);

    byte[] bytes1 = m1.serialize();

    HMapIDW m2 = new HMapIDW();
    m2.put(3, 1.0);
    m2.put(4, 1.0);
    m2.put(5, 1.0);

    byte[] bytes2 = m2.serialize();

    HMapIDW n1 = HMapIDW.create(bytes1);
    HMapIDW n2 = HMapIDW.create(bytes2);

    assertFalse(n1.isDecoded());
    assertEquals(2, n1.size());

    assertFalse(n2.isDecoded());
    assertEquals(3, n2.size());

    // n2 isn't decoded, n1 is
    n1.decode();
    n1.plus(n2);

    assertTrue(n1.size() == 3);
    assertTrue(n1.get(3) == 6.0);
    assertTrue(n1.get(4) == 23.0);
    assertTrue(n1.get(5) == 1.0);
    assertTrue(n1.isDecoded());
    assertFalse(n2.isDecoded());
  }
View Full Code Here

  @Test
  public void testLazyPlus4() throws IOException {
    HMapIDW.setLazyDecodeFlag(true);

    HMapIDW m1 = new HMapIDW();
    m1.put(3, 5.0);
    m1.put(4, 22.0);

    byte[] bytes1 = m1.serialize();

    HMapIDW m2 = new HMapIDW();
    m2.put(3, 1.0);
    m2.put(4, 1.0);
    m2.put(5, 1.0);

    byte[] bytes2 = m2.serialize();

    HMapIDW n1 = HMapIDW.create(bytes1);
    HMapIDW n2 = HMapIDW.create(bytes2);

    assertFalse(n1.isDecoded());
    assertEquals(2, n1.size());

    assertFalse(n2.isDecoded());
    assertEquals(3, n2.size());

    // both n1 and n2 are decoded
    n1.decode();
    n2.decode();
    n1.plus(n2);

    assertTrue(n1.size() == 3);
    assertTrue(n1.get(3) == 6.0);
    assertTrue(n1.get(4) == 23.0);
    assertTrue(n1.get(5) == 1.0);
    assertTrue(n1.isDecoded());
    assertTrue(n2.isDecoded());
  }
View Full Code Here

TOP

Related Classes of edu.umd.cloud9.io.map.HMapIDW

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.