Examples of LoadIncrementalHFiles

Examples of org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles

    // Stage the input corpus on HDFS and create two target tables; the
    // helpers here come from the surrounding Crunch/HBase test class.
    Path inputPath = copyResourceFileToHDFS("shakes.txt");
    Path outputPath1 = getTempPathOnHDFS("out1");
    Path outputPath2 = getTempPathOnHDFS("out2");
    HTable table1 = createTable(26);
    HTable table2 = createTable(26);
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(HBASE_TEST_UTILITY.getConfiguration());

    // Crunch pipeline: tokenize the text, partition the words into short
    // and long, and count each partition.
    PCollection<String> shakespeare = pipeline.read(At.textFile(inputPath, Writables.strings()));
    PCollection<String> words = split(shakespeare, "\\s+");
    PCollection<String> shortWords = words.filter(SHORT_WORD_FILTER);
    PCollection<String> longWords = words.filter(FilterFns.not(SHORT_WORD_FILTER));
    PTable<String, Long> shortWordCounts = shortWords.count();
    PTable<String, Long> longWordCounts = longWords.count();
    // Write each count table out as HFiles arranged for incremental load.
    HFileUtils.writePutsToHFilesForIncrementalLoad(
        convertToPuts(shortWordCounts),
        table1,
        outputPath1);
    HFileUtils.writePutsToHFilesForIncrementalLoad(
        convertToPuts(longWordCounts),
        table2,
        outputPath2);

    // Run the pipeline, then hand the generated HFiles to the two tables.
    PipelineResult result = pipeline.run();
    assertTrue(result.succeeded());

    loader.doBulkLoad(outputPath1, table1);
    loader.doBulkLoad(outputPath2, table2);

    assertEquals(396L, getWordCountFromTable(table1, "of"));
    assertEquals(427L, getWordCountFromTable(table2, "and"));
  }
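
The convertToPuts helper comes from the surrounding test class and is not shown here. A minimal sketch of what such a conversion could look like, assuming a single column family "f" and qualifier "count" (both hypothetical names, not taken from the original test); HBaseTypes.puts() is crunch-hbase's serialization type for Put:

    // Hypothetical sketch: map each (word, count) pair to an HBase Put.
    // The family "f" and qualifier "count" are assumed names.
    private static PCollection<Put> convertToPuts(PTable<String, Long> counts) {
      return counts.parallelDo(new MapFn<Pair<String, Long>, Put>() {
        @Override
        public Put map(Pair<String, Long> wordCount) {
          Put put = new Put(Bytes.toBytes(wordCount.first()));
          put.add(Bytes.toBytes("f"), Bytes.toBytes("count"),
              Bytes.toBytes(wordCount.second()));
          return put;
        }
      }, HBaseTypes.puts());
    }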

Examples of org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles

    conf.set("com.lbt.extentsname", extp.getName());
    // job.getConfiguration().setBoolean("mapred.task.profile", true);
    // job.getConfiguration().setBoolean("mapreduce.task.profile", true);
    boolean result = job.waitForCompletion(true);
    if (result) {
      // Job succeeded: bulk-load the generated HFiles, then remove them.
      LoadIncrementalHFiles loader = new LoadIncrementalHFiles();
      HBaseConfiguration.addHbaseResources(conf);
      loader.setConf(conf);
      LOG.info("Loading hashes into hbase");
      loader.doBulkLoad(hfileDir, new HTable(conf, HBaseTables.HASH_TBL_B));
      result = fs.delete(hfileDir, true);
    }
    return result ? 0 : 1;
  }
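
Only the load step is shown above; the job being waited on would typically have been configured for HFile output along these lines (a sketch with an assumed mapper setup, not the original project's code):

    // Sketch: configure the job so its output is HFiles that doBulkLoad can
    // consume. configureIncrementalLoad wires in the sorting reducer, the
    // TotalOrderPartitioner, and HFileOutputFormat to match the table's regions.
    Job job = Job.getInstance(conf, "hash import");
    job.setMapOutputKeyClass(ImmutableBytesWritable.class);
    job.setMapOutputValueClass(Put.class);
    HFileOutputFormat.configureIncrementalLoad(job, new HTable(conf, HBaseTables.HASH_TBL_B));
    FileOutputFormat.setOutputPath(job, hfileDir);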

Examples of org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles

      HTable table = new HTable(conf, tableName);
      try {
        // Make sure the table is fully enabled before attempting the load.
        HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
        TEST_UTIL.waitTableEnabled(admin, tableName.getName());
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        loader.doBulkLoad(loadPath, table);
      } finally {
        table.close();
      }
    }
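
LoadIncrementalHFiles is also a Hadoop Tool, so the same load can be driven through ToolRunner (or the completebulkload command) instead of calling doBulkLoad directly. A sketch with placeholder arguments:

    // Tool-style invocation; the output path and table name are placeholders.
    int exitCode = ToolRunner.run(HBaseConfiguration.create(),
        new LoadIncrementalHFiles(), new String[] { "/path/to/hfile/output", "myTable" });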

Examples of org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles

      Path familyDir = new Path(dir, Bytes.toString(A));

      // Write one small HFile under the column-family directory.
      createHFile(util.getConfiguration(), fs, new Path(familyDir, Bytes.toString(A)), A, A);

      // Bulk load the staged directory into the table.
      new LoadIncrementalHFiles(conf).doBulkLoad(dir, new HTable(conf, tableName));

      // Verify that both bulk-load coprocessor hooks fired on the region.
      verifyMethodResult(SimpleRegionObserver.class,
          new String[] {"hadPreBulkLoadHFile", "hadPostBulkLoadHFile"},
          tableName,
          new Boolean[] {true, true});
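
SimpleRegionObserver is HBase's test observer; in a custom coprocessor, the two hooks being verified look roughly like this (a sketch using the pre-1.0 RegionObserver signatures, with a hypothetical class name):

      // Sketch of a RegionObserver implementing the two bulk-load hooks.
      public static class BulkLoadObserver extends BaseRegionObserver {
        @Override
        public void preBulkLoadHFile(ObserverContext<RegionCoprocessorEnvironment> ctx,
            List<Pair<byte[], String>> familyPaths) throws IOException {
          // runs on the region server before the HFiles are moved into the region
        }

        @Override
        public boolean postBulkLoadHFile(ObserverContext<RegionCoprocessorEnvironment> ctx,
            List<Pair<byte[], String>> familyPaths, boolean hasLoaded) throws IOException {
          return hasLoaded; // runs after the attempt; hasLoaded reports the outcome
        }
      }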

Examples of org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles

      // HBase runs as a different user and must be able to move the HFiles,
      // so open up permissions on the staged output first.
      setPermission(loadPath, FsPermission.valueOf("-rwxrwxrwx"));

      HTable table = new HTable(conf, tableName);
      try {
        TEST_UTIL.waitTableAvailable(tableName, 30000);
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        loader.doBulkLoad(loadPath, table);
      } finally {
        table.close();
      }
    }

Examples of org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles

                  LOG.error("Import job failed, check JobTracker for details");
                  return false;
              }
 
              LOG.info("Loading HFiles from {}", outputPath);
              LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
              loader.doBulkLoad(outputPath, htable);
              htable.close();
 
              LOG.info("Incremental load complete for table=" + tableName);
 
              LOG.info("Removing output directory {}", outputPath);

Examples of org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles

    // Before we can load the HFiles, we need to set the permissions so that
    // HBase has write access to familyDir's contents
    chmod(familyDir.toString());

    HTable table = new HTable(conf, TABLE);
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
    loader.doBulkLoad(dir, table);

    assertEquals(expectedRows, HBaseTestUtil.countRows(table));

    // disable and drop the table now that the load has been verified
    admin.disableTable(TABLE);
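
The chmod helper is not shown; a plausible sketch (an assumed implementation, not the original's) that recursively opens up permissions under the HFile directory:

    // Hypothetical recursive chmod so the HBase user can read and move the HFiles.
    private void chmod(String uri) throws IOException {
      Path path = new Path(uri);
      FileSystem fs = path.getFileSystem(conf);
      fs.setPermission(path, FsPermission.valueOf("-rwxrwxrwx"));
      if (fs.isDirectory(path)) {
        for (FileStatus child : fs.listStatus(path)) {
          chmod(child.getPath().toString());
        }
      }
    }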