Examples of HiveStorageHandler


Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

    if (tableDesc == null) {
      return;
    }

    try {
      HiveStorageHandler storageHandler =
        HiveUtils.getStorageHandler(
          Hive.get().getConf(),
          tableDesc.getProperties().getProperty(
            org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE));
      if (storageHandler != null) {
        Map<String, String> jobProperties = new LinkedHashMap<String, String>();
        if (input) {
          try {
            storageHandler.configureInputJobProperties(
              tableDesc,
              jobProperties);
          } catch (AbstractMethodError e) {
            LOG.debug("configureInputJobProperties not found " +
                "using configureTableJobProperties", e);
            storageHandler.configureTableJobProperties(tableDesc, jobProperties);
          }
        } else {
          try {
            storageHandler.configureOutputJobProperties(
              tableDesc,
              jobProperties);
          } catch (AbstractMethodError e) {
            LOG.debug("configureOutputJobProperties not found " +
                "using configureTableJobProperties", e);
            storageHandler.configureTableJobProperties(tableDesc, jobProperties);
          }
          if (tableDesc.getOutputFileFormatClass().getName()
                .equals(HivePassThroughOutputFormat.HIVE_PASSTHROUGH_OF_CLASSNAME)) {
            // get the real output format when we register this for the table
            jobProperties.put(
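The snippet above is the planner side: Hive loads the handler named by the table's META_TABLE_STORAGE property and asks it to fill in job properties, falling back to configureTableJobProperties for handlers compiled against the older interface. For orientation, here is a minimal sketch of the handler side; ExampleStorageHandler and the example.connection.url key are hypothetical, and it assumes the Hive 0.12-era API in which DefaultStorageHandler implements HiveStorageHandler.

import java.util.Map;

import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.plan.TableDesc;

// Hypothetical handler; only the job-property hooks exercised above are overridden.
public class ExampleStorageHandler extends DefaultStorageHandler {

  // Hypothetical table property that readers and writers need at runtime.
  private static final String CONNECTION_KEY = "example.connection.url";

  @Override
  public void configureInputJobProperties(TableDesc tableDesc,
      Map<String, String> jobProperties) {
    // Copy the connection URL stored with the table into the job properties
    // so the input format can see it on every task.
    String url = tableDesc.getProperties().getProperty(CONNECTION_KEY);
    if (url != null) {
      jobProperties.put(CONNECTION_KEY, url);
    }
  }

  @Override
  public void configureOutputJobProperties(TableDesc tableDesc,
      Map<String, String> jobProperties) {
    // Writers need the same information in this sketch.
    configureInputJobProperties(tableDesc, jobProperties);
  }
}

A table created with STORED BY and this class name gets the class recorded under META_TABLE_STORAGE, which is exactly the property the lookup above reads.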

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

  public static void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
    String handlerClass = tableDesc.getProperties().getProperty(
        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
    try {
      HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(jobConf, handlerClass);
      if (storageHandler != null) {
        storageHandler.configureJobConf(tableDesc, jobConf);
      }
    } catch (HiveException e) {
      throw new RuntimeException(e);
    }
  }
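configureJobConf is the coarser hook: the handler receives the whole JobConf rather than a property map, which is where handlers typically register jars, credentials, or settings needed by every task. A hedged sketch of the handler side, with a hypothetical class name:

import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.mapred.JobConf;

// Hypothetical handler showing the hook that the static helper above invokes.
public class ExampleJobConfHandler extends DefaultStorageHandler {

  @Override
  public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
    // Settings placed here reach every task of the job, not just the
    // per-table job properties; a standard Hadoop flag is used as an example.
    jobConf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
  }
}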

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

              .getName());
      }
    } else {
      try {
        LOG.info("Creating instance of storage handler to get input/output, serder info.");
        HiveStorageHandler sh = HiveUtils.getStorageHandler(conf,
          storageHandler);
        sd.setInputFormat(sh.getInputFormatClass().getName());
        sd.setOutputFormat(sh.getOutputFormatClass().getName());
        sd.getSerdeInfo().setSerializationLib(
          sh.getSerDeClass().getName());
        newTable.putToParameters(
          org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
          storageHandler);
      } catch (HiveException e) {
        throw new HCatException(

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

      tableScanDesc.setFilterExpr(originalPredicate);
    }
    if (!tbl.isNonNative()) {
      return originalPredicate;
    }
    HiveStorageHandler storageHandler = tbl.getStorageHandler();
    if (!(storageHandler instanceof HiveStoragePredicateHandler)) {
      // The storage handler does not provide predicate decomposition
      // support, so we'll implement the entire filter in Hive.  However,
      // we still provide the full predicate to the storage handler in
      // case it wants to do any of its own prefiltering.
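The comment above describes the fallback path. When the handler does implement HiveStoragePredicateHandler, the planner can hand it the filter and keep only the residual for Hive to evaluate. A minimal sketch of that call pattern follows; the helper class and method are hypothetical, while decomposePredicate and the DecomposedPredicate fields are the real HiveStoragePredicateHandler API.

import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler.DecomposedPredicate;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.mapred.JobConf;

// Hypothetical helper illustrating predicate decomposition against a handler
// that supports it; the caller supplies the job configuration and deserializer.
public class PredicatePushdownSketch {

  static ExprNodeDesc pushToHandler(HiveStoragePredicateHandler predicateHandler,
      JobConf jobConf, Deserializer deserializer, ExprNodeDesc originalPredicate) {
    DecomposedPredicate decomposed =
        predicateHandler.decomposePredicate(jobConf, deserializer, originalPredicate);
    if (decomposed == null) {
      // Nothing could be pushed down; Hive evaluates the whole filter itself.
      return originalPredicate;
    }
    // decomposed.pushedPredicate is handed to the storage handler at scan time;
    // the (possibly null) residual is what Hive still applies after the scan.
    return decomposed.residualPredicate;
  }
}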

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

    if (crtTbl.getStorageHandler() != null) {
      tbl.setProperty(
        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
        crtTbl.getStorageHandler());
    }
    HiveStorageHandler storageHandler = tbl.getStorageHandler();

    /*
     * We use LazySimpleSerDe by default.
     *
     * If the user didn't specify a SerDe, and any of the columns are not simple
     * types, we will have to use DynamicSerDe instead.
     */
    if (crtTbl.getSerName() == null) {
      if (storageHandler == null) {
        LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName());
        tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      } else {
        String serDeClassName = storageHandler.getSerDeClass().getName();
        LOG.info("Use StorageHandler-supplied " + serDeClassName
          + " for table " + crtTbl.getTableName());
        tbl.setSerializationLib(serDeClassName);
      }
    } else {
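In the non-default branch, the SerDe name recorded on the table comes straight from the handler. A hedged sketch of a handler supplying its own SerDe, whose choice the else-branch above would record via tbl.setSerializationLib; the class is hypothetical and assumes the era of this code where getSerDeClass returns Class<? extends SerDe>.

import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;

// Hypothetical handler that overrides only the SerDe choice.
public class ColumnarExampleHandler extends DefaultStorageHandler {

  @Override
  public Class<? extends SerDe> getSerDeClass() {
    return ColumnarSerDe.class;
  }
}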

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

                if (inputFormatObj instanceof ContentSummaryInputFormat) {
                  ContentSummaryInputFormat cs = (ContentSummaryInputFormat) inputFormatObj;
                  resultMap.put(pathStr, cs.getContentSummary(p, myJobConf));
                  return;
                }
                HiveStorageHandler handler = HiveUtils.getStorageHandler(myConf,
                    partDesc.getOverlayedProperties().getProperty(
                    hive_metastoreConstants.META_TABLE_STORAGE));
                if (handler instanceof InputEstimator) {
                  long total = 0;
                  TableDesc tableDesc = partDesc.getTableDesc();

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

    if (crtTbl.getStorageHandler() != null) {
      tbl.setProperty(
          org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
          crtTbl.getStorageHandler());
    }
    HiveStorageHandler storageHandler = tbl.getStorageHandler();

    /*
     * We use LazySimpleSerDe by default.
     *
     * If the user didn't specify a SerDe, and any of the columns are not simple
     * types, we will have to use DynamicSerDe instead.
     */
    if (crtTbl.getSerName() == null) {
      if (storageHandler == null) {
        LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName());
        tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      } else {
        String serDeClassName = storageHandler.getSerDeClass().getName();
        LOG.info("Use StorageHandler-supplied " + serDeClassName
            + " for table " + crtTbl.getTableName());
        tbl.setSerializationLib(serDeClassName);
      }
    } else {

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

                            .getName());
            }
        } else {
            try {
                LOG.info("Creating instance of storage handler to get input/output, serder info.");
                HiveStorageHandler sh = HiveUtils.getStorageHandler(conf,
                    storageHandler);
                sd.setInputFormat(sh.getInputFormatClass().getName());
                sd.setOutputFormat(sh.getOutputFormatClass().getName());
                sd.getSerdeInfo().setSerializationLib(
                    sh.getSerDeClass().getName());
                newTable.putToParameters(
                    org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
                    storageHandler);
            } catch (HiveException e) {
                throw new HCatException(

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

    if (tableDesc == null) {
      return;
    }

    try {
      HiveStorageHandler storageHandler =
        HiveUtils.getStorageHandler(
          Hive.get().getConf(),
          tableDesc.getProperties().getProperty(
            org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE));
      if (storageHandler != null) {
        Map<String, String> jobProperties = new LinkedHashMap<String, String>();
        if (input) {
          try {
            storageHandler.configureInputJobProperties(
              tableDesc,
              jobProperties);
          } catch (AbstractMethodError e) {
            LOG.debug("configureInputJobProperties not found " +
                "using configureTableJobProperties", e);
            storageHandler.configureTableJobProperties(tableDesc, jobProperties);
          }
        } else {
          try {
            storageHandler.configureOutputJobProperties(
              tableDesc,
              jobProperties);
          } catch (AbstractMethodError e) {
            LOG.debug("configureOutputJobProperties not found " +
                "using configureTableJobProperties", e);
            storageHandler.configureTableJobProperties(tableDesc, jobProperties);
          }
        }
        // Job properties are only relevant for non-native tables, so
        // for native tables, leave it null to avoid cluttering up
        // plans.

Examples of org.apache.hadoop.hive.ql.metadata.HiveStorageHandler

        authProviders.put(storageHandlerClass, authProviderClass);
    }

    /** Returns the authorization provider to delegate to, based on the Table's storage handler. */
    protected HiveAuthorizationProvider getDelegate(Table table) throws HiveException {
        HiveStorageHandler handler = table.getStorageHandler();

        if (handler != null) {
            if (handler instanceof HCatStorageHandler) {
                return ((HCatStorageHandler) handler).getAuthorizationProvider();
            } else {
                String authProviderClass = authProviders.get(handler.getClass().getCanonicalName());

                if (authProviderClass != null) {
                    try {
                        return (HiveAuthorizationProvider) ReflectionUtils.newInstance(
                            getConf().getClassByName(authProviderClass), getConf());
                    } catch (ClassNotFoundException ex) {