Package org.apache.hadoop.hbase

Examples of org.apache.hadoop.hbase.HTableDescriptor


   * @throws IOException
   *           on failure to read column family descriptors
   */
  @VisibleForTesting
  static void configureBlockSize(HTable table, Configuration conf) throws IOException {
    StringBuilder blockSizeConfigValue = new StringBuilder();
    HTableDescriptor tableDescriptor = table.getTableDescriptor();
    if (tableDescriptor == null) {
      // could happen with mock table instance
      return;
    }
    Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
    int i = 0;
    for (HColumnDescriptor familyDescriptor : families) {
      if (i++ > 0) {
        blockSizeConfigValue.append('&');
      }
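
The excerpt above breaks off inside the loop. Presumably each family's block size is appended as a URL-encoded family=size pair and the result is stored in the job configuration. A self-contained sketch under that assumption (the configuration key name here is illustrative, not HBase's internal constant):

import java.io.IOException;
import java.net.URLEncoder;
import java.util.Collection;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;

public class BlockSizeConfigSketch {

  // Illustrative key; HFileOutputFormat keeps its own (non-public) constant.
  static final String BLOCK_SIZE_FAMILIES_CONF_KEY =
      "example.hfileoutputformat.families.blocksize";

  /** Serializes each family's block size as "family=size" pairs joined by '&'. */
  static void configureBlockSize(HTableDescriptor tableDescriptor, Configuration conf)
      throws IOException {
    if (tableDescriptor == null) {
      return; // could happen with a mock table instance
    }
    StringBuilder value = new StringBuilder();
    Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
    int i = 0;
    for (HColumnDescriptor familyDescriptor : families) {
      if (i++ > 0) {
        value.append('&');
      }
      value.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
      value.append('=');
      value.append(URLEncoder.encode(
          String.valueOf(familyDescriptor.getBlocksize()), "UTF-8"));
    }
    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, value.toString());
  }
}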


   * @throws IOException
   *           on failure to read column family descriptors
   */
  @VisibleForTesting
  static void configureBloomType(HTable table, Configuration conf) throws IOException {
    HTableDescriptor tableDescriptor = table.getTableDescriptor();
    if (tableDescriptor == null) {
      // could happen with mock table instance
      return;
    }
    StringBuilder bloomTypeConfigValue = new StringBuilder();
    Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
    int i = 0;
    for (HColumnDescriptor familyDescriptor : families) {
      if (i++ > 0) {
        bloomTypeConfigValue.append('&');
      }
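
This bloom-type variant is cut off at the same point and presumably mirrors the block-size loop, appending URL-encoded family=bloomType pairs (via HColumnDescriptor.getBloomFilterType()). The decoding side does not appear in these excerpts; an illustrative sketch of parsing such a serialized string back into a per-family map:

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;

public class FamilyConfDecoder {

  /** Decodes a URL-encoded "family=value&family=value" string into a map. */
  static Map<String, String> decodeFamilyValues(String serialized)
      throws UnsupportedEncodingException {
    Map<String, String> result = new HashMap<String, String>();
    if (serialized == null || serialized.isEmpty()) {
      return result;
    }
    for (String pair : serialized.split("&")) {
      String[] parts = pair.split("=", 2);
      if (parts.length == 2) {
        result.put(URLDecoder.decode(parts[0], "UTF-8"),
            URLDecoder.decode(parts[1], "UTF-8"));
      }
    }
    return result;
  }
}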

   * @throws IOException
   *           on failure to read column family descriptors
   */
  @VisibleForTesting
  static void configureDataBlockEncoding(HTable table,
      Configuration conf) throws IOException {
    HTableDescriptor tableDescriptor = table.getTableDescriptor();
    if (tableDescriptor == null) {
      // could happen with mock table instance
      return;
    }
    StringBuilder dataBlockEncodingConfigValue = new StringBuilder();
    Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
    int i = 0;
    for (HColumnDescriptor familyDescriptor : families) {
      if (i++ > 0) {
        dataBlockEncodingConfigValue.append('&');
      }
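
Like the two excerpts above, this one is cut off inside the serialization loop. These configure* helpers appear to be HFileOutputFormat's per-family serialization methods, which are normally reached through the public configureIncrementalLoad entry point rather than called one by one. A usage sketch, assuming a 0.98/1.x-era client (newer releases take a Table plus RegionLocator instead of an HTable):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.mapreduce.Job;

public class BulkLoadJobSetup {

  /** Builds a job whose output is HFiles ready for bulk load into tableName. */
  public static Job createBulkLoadJob(String tableName) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Job job = Job.getInstance(conf, "bulk-load-" + tableName);
    HTable table = new HTable(conf, tableName);
    // Sets the partitioner, reducer, output format, and the per-family
    // settings (compression, bloom type, block size, data block encoding).
    HFileOutputFormat2.configureIncrementalLoad(job, table);
    return job;
  }
}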

    return job;
  }

  private static void createTable(HBaseAdmin admin, String tableName, String[] columns)
      throws IOException {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
    Set<String> cfSet = new HashSet<String>();
    for (String aColumn : columns) {
      if (TsvParser.ROWKEY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.TIMESTAMPKEY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.CELL_VISIBILITY_COLUMN_SPEC.equals(aColumn)
          || TsvParser.ATTRIBUTES_COLUMN_SPEC.equals(aColumn))
        continue;
      // we are only concerned with the first one (in case this is a cf:cq)
      cfSet.add(aColumn.split(":", 2)[0]);
    }
    for (String cf : cfSet) {
      HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes(cf));
      htd.addFamily(hcd);
    }
    LOG.warn(format("Creating table '%s' with '%s' columns and default descriptors.",
      tableName, cfSet));
    admin.createTable(htd);
  }
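
HTableDescriptor and HColumnDescriptor are deprecated in HBase 2.x in favour of the builder API. A sketch of the same table-creation step with the builders (assuming an HBase 2.x client; the family set is computed as in the ImportTsv-style helper above):

import java.io.IOException;
import java.util.Set;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class CreateTableSketch {

  /** Creates tableName with one default-configured column family per entry in families. */
  static void createTable(Admin admin, String tableName, Set<String> families)
      throws IOException {
    TableDescriptorBuilder builder =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
    for (String family : families) {
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    }
    admin.createTable(builder.build());
  }
}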

  }

  protected void initHRegion(byte[] tableName, String callingMethod,
    HBaseConfiguration conf, byte[]... families)
    throws IOException {
    HTableDescriptor htd = constructTableDescriptor(tableName, families);
    HRegionInfo info = new HRegionInfo(htd, null, null, false);
    Path path = new Path(DIR + callingMethod);
    region = HRegion.createHRegion(info, path, conf);
  }
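
The HRegionInfo(HTableDescriptor, ...) constructor used here predates HBase 0.92, when the table descriptor still travelled inside the region info. On later clients the same test helper looks roughly like this (a sketch; signatures vary by release, and this form matches roughly 0.96 through 1.x):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.HRegion;

public class RegionTestSetup {

  /** Creates a standalone HRegion under dir/callingMethod for the given families. */
  static HRegion initHRegion(String tableName, String callingMethod, String dir,
      Configuration conf, byte[]... families) throws IOException {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
    for (byte[] family : families) {
      htd.addFamily(new HColumnDescriptor(family));
    }
    HRegionInfo info = new HRegionInfo(htd.getTableName(),
        HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
    Path path = new Path(dir + callingMethod);
    // The descriptor is passed separately instead of riding inside HRegionInfo.
    return HRegion.createHRegion(info, path, conf, htd);
  }
}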

    region = HRegion.createHRegion(info, path, conf);
  }

  protected HTableDescriptor constructTableDescriptor(byte[] tableName,
    byte[]... families) {
    HTableDescriptor htd = new HTableDescriptor(tableName);
    for (byte[] family : families) {
      htd.addFamily(new HColumnDescriptor(family));
    }
    return htd;
  }
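
A brief usage example of the helper above (table and family names are illustrative; Bytes is org.apache.hadoop.hbase.util.Bytes):

HTableDescriptor htd = constructTableDescriptor(
    Bytes.toBytes("testtable"), Bytes.toBytes("fam1"), Bytes.toBytes("fam2"));
// htd.hasFamily(Bytes.toBytes("fam1")) is true and htd.getFamilies().size() is 2.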

      LOG.debug("Files for region: " + b);
      listPaths(fs, b.getRegionDir());
    }
   
    HBaseConfiguration conf = a.getConf();
    HTableDescriptor tabledesc = a.getTableDesc();
    HLog log = a.getLog();
    Path basedir = a.getBaseDir();
    // Presume both are of same region type -- i.e. both user or catalog
    // table regions.  This way can use comparator.
    final byte [] startKey = a.comparator.matchingRows(a.getStartKey(), 0,
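
The excerpt stops in the middle of computing the merged region's start key. The underlying idea, taking the wider of the two key ranges and treating an empty key as unbounded, can be sketched without the comparator plumbing (an illustrative helper, not the original code):

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.Bytes;

public class MergedKeyRange {

  /** Picks the smaller of two start keys; an empty key means "open-ended". */
  static byte[] mergedStartKey(byte[] aStart, byte[] bStart) {
    if (aStart.length == 0 || bStart.length == 0) {
      return HConstants.EMPTY_BYTE_ARRAY;
    }
    return Bytes.compareTo(aStart, bStart) <= 0 ? aStart : bStart;
  }

  /** Picks the larger of two end keys; an empty key means "open-ended". */
  static byte[] mergedEndKey(byte[] aEnd, byte[] bEnd) {
    if (aEnd.length == 0 || bEnd.length == 0) {
      return HConstants.EMPTY_BYTE_ARRAY;
    }
    return Bytes.compareTo(aEnd, bEnd) >= 0 ? aEnd : bEnd;
  }
}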

    Path reconstructionLog = null;
    Progressable reporter = null;

    fs.delete(logdir, true);

    HTableDescriptor htd = new HTableDescriptor(table);
    htd.addFamily(hcd);
    HRegionInfo info = new HRegionInfo(htd, null, null, false);
    HLog hlog = new HLog(fs, logdir, conf, null);
    HRegion region = new HRegion(basedir, hlog, fs, conf, info, null);
   
    store = new Store(basedir, region, hcd, fs, reconstructionLog, conf,
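
This excerpt relies on table, hcd, fs, conf, basedir and logdir being prepared elsewhere in the test. Plausible definitions for a local-filesystem run (illustrative values, not the original fixture) would be:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class StoreTestFixture {
  Configuration conf;
  FileSystem fs;
  byte[] table;
  HColumnDescriptor hcd;
  Path basedir;
  Path logdir;

  void setUp() throws IOException {
    conf = HBaseConfiguration.create();
    fs = FileSystem.get(conf);                // local filesystem by default
    table = Bytes.toBytes("table");
    hcd = new HColumnDescriptor(Bytes.toBytes("family"));
    basedir = new Path(System.getProperty("java.io.tmpdir"), "teststore");
    logdir = new Path(basedir, "logs");
  }
}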

  private long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length;
   
 
  protected void setUp() throws Exception {
    super.setUp();
    HTableDescriptor htd = new HTableDescriptor(getName());
    htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
    htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
    HRegionInfo info = new HRegionInfo(htd, null, null, false);
    this.region = HRegion.createHRegion(info, this.testDir, this.conf);
   
    // Insert first half
    for(byte [] ROW : ROWS_ONE) {
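
The excerpt is cut off inside the "insert first half" loop. In this style of test the loop typically writes every qualifier of the first family for each row; a hedged sketch of what likely follows (field names match the excerpt, but VALUES is an assumed byte[][] fixture):

// Sketch of the truncated loop; Put is org.apache.hadoop.hbase.client.Put.
for (byte[] row : ROWS_ONE) {
  Put p = new Put(row);
  for (byte[] qualifier : QUALIFIERS_ONE) {
    p.add(FAMILIES[0], qualifier, VALUES[0]);   // old-style Put.add(family, qualifier, value)
  }
  this.region.put(p);
}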

    return createTable(tableName, new byte[][]{family});
  }
 
  private HTable createTable(byte [] tableName, byte [][] families)
  throws IOException {
    HTableDescriptor desc = new HTableDescriptor(tableName);
    for(byte [] family : families) {
      desc.addFamily(new HColumnDescriptor(family));
    }
    HBaseAdmin admin = new HBaseAdmin(conf);
    admin.createTable(desc);
    return new HTable(conf, tableName);
  }
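
A short usage example of the helper above, writing and reading one cell (assumes a running cluster reachable through conf; Put, Get and Result are in org.apache.hadoop.hbase.client, and the names are illustrative):

HTable table = createTable(Bytes.toBytes("testtable"), Bytes.toBytes("info"));
Put put = new Put(Bytes.toBytes("row1"));
put.add(Bytes.toBytes("info"), Bytes.toBytes("qual"), Bytes.toBytes("value1"));
table.put(put);
Result result = table.get(new Get(Bytes.toBytes("row1")));
// result.getValue(Bytes.toBytes("info"), Bytes.toBytes("qual")) returns the "value1" bytes.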
