Examples of Hive


Examples of org.apache.hadoop.hive.ql.metadata.Hive

    Path tblDir;
    Configuration conf = context.getConf();
    try {
      Warehouse wh = new Warehouse(conf);
      if (loc == null || loc.isEmpty()) {
        // No LOCATION was given: default to the warehouse path for the current database.
        Hive hive = context.getHive();
        tblDir = wh.getTablePath(hive.getDatabase(hive.getCurrentDatabase()), tableName).getParent();
      } else {
        tblDir = wh.getDnsPath(new Path(loc));
      }
View Full Code Here
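
This first excerpt resolves the directory a new table will live in: when no LOCATION clause was supplied, it falls back to the warehouse default for the current database. A minimal, self-contained sketch of the same lookup follows; it assumes the same Hive API version as the excerpt, a reachable metastore, and a hypothetical table named "my_table".

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.Warehouse;
    import org.apache.hadoop.hive.ql.metadata.Hive;

    public class TablePathExample {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();      // picks up hive-site.xml from the classpath
        Hive hive = Hive.get(conf);          // thread-local metastore client handle
        Warehouse wh = new Warehouse(conf);

        // Default location: <warehouse dir>/<database dir>/<table name>
        Path tblPath = wh.getTablePath(
            hive.getDatabase(hive.getCurrentDatabase()), "my_table");
        System.out.println("Table would be stored at: " + tblPath);
      }
    }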

Examples of org.apache.hadoop.hive.ql.metadata.Hive

    Map<String, String> hcatProps = new HashMap<String, String>(2);
    hcatProps.put(HCatConstants.HCAT_ISD_CLASS, inDriver);
    hcatProps.put(HCatConstants.HCAT_OSD_CLASS, outDriver);

    try {
      Hive db = context.getHive();
      Table tbl = db.getTable(tableName);
      if(partSpec == null){
        // File format is for table; not for partition.
        tbl.getTTable().getParameters().putAll(hcatProps);
        db.alterTable(tableName, tbl);
      }else{
        Partition part = db.getPartition(tbl,partSpec,false);
        Map<String,String> partParams = part.getParameters();
        if(partParams == null){
          partParams = new HashMap<String, String>();
        }
        partParams.putAll(hcatProps);
        part.getTPartition().setParameters(partParams);
        db.alterPartition(tableName, part);
      }
    } catch (HiveException he) {
      throw new SemanticException(he);
    } catch (InvalidOperationException e) {
      throw new SemanticException(e);
View Full Code Here
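
Here the DDL hook stamps its input/output storage-driver classes into the table (or partition) parameters and persists them through alterTable / alterPartition. A hedged sketch of the table-level path, using placeholder property keys and a hypothetical table name:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class AlterTablePropsExample {
      public static void main(String[] args) throws Exception {
        Hive db = Hive.get(new HiveConf());
        Table tbl = db.getTable("my_table");   // hypothetical table in the current database

        // Merge the new key/value pairs into the table's parameter map ...
        Map<String, String> props = new HashMap<String, String>();
        props.put("example.input.driver", "com.example.InputDriver");    // placeholder keys/values
        props.put("example.output.driver", "com.example.OutputDriver");
        tbl.getTTable().getParameters().putAll(props);

        // ... and write the updated metadata back to the metastore.
        db.alterTable("my_table", tbl);
      }
    }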

Examples of org.apache.hadoop.hive.ql.metadata.Hive

   * @throws NoSuchObjectException
   * @throws TException
   */
  static private void pruneByPushDown(Table tab, Set<Partition> true_parts, String filter)
      throws HiveException, MetaException, NoSuchObjectException, TException {
    Hive db = Hive.get();
    List<Partition> parts = db.getPartitionsByFilter(tab, filter);
    true_parts.addAll(parts);
    return;
  }
View Full Code Here
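
pruneByPushDown delegates partition pruning to the metastore: the filter expression is evaluated server-side by getPartitionsByFilter, so only matching partitions are fetched. Used on its own, the call looks roughly like this; the table name and filter string are invented for illustration:

    import java.util.List;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.Partition;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class PartitionFilterExample {
      public static void main(String[] args) throws Exception {
        Hive db = Hive.get(new HiveConf());
        Table tab = db.getTable("web_logs");   // hypothetical partitioned table

        // The filter is applied by the metastore, so only matching partitions come back.
        List<Partition> parts = db.getPartitionsByFilter(tab, "ds >= '2018-01-01'");
        System.out.println("Matched " + parts.size() + " partitions");
      }
    }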

Examples of org.apache.hadoop.hive.ql.metadata.Hive

      throws HiveException, AuthorizationException {
    HashSet<ReadEntity> inputs = sem.getInputs();
    HashSet<WriteEntity> outputs = sem.getOutputs();
    SessionState ss = SessionState.get();
    HiveOperation op = ss.getHiveOperation();
    Hive db = sem.getDb();
    if (op != null) {
      if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
          || op.equals(HiveOperation.CREATETABLE)) {
        ss.getAuthorizer().authorize(
            db.getDatabase(db.getCurrentDatabase()), null,
            HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
      } else {
        if (op.equals(HiveOperation.IMPORT)) {
          ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
          if (!isa.existsTable()) {
            ss.getAuthorizer().authorize(
                db.getDatabase(db.getCurrentDatabase()), null,
                HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
          }
        }
      }
      if (outputs != null && outputs.size() > 0) {
        for (WriteEntity write : outputs) {

          if (write.getType() == WriteEntity.Type.PARTITION) {
            Partition part = db.getPartition(write.getTable(), write
                .getPartition().getSpec(), false);
            if (part != null) {
              ss.getAuthorizer().authorize(write.getPartition(), null,
                      op.getOutputRequiredPrivileges());
              continue;
View Full Code Here
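
This authorization hook inspects the analyzer's outputs and checks the required privileges: database-level privileges for CREATE TABLE [AS SELECT] (and for IMPORT into a table that does not yet exist), then per-partition or per-table write privileges for everything else. A condensed sketch of the database-level check, assuming a started SessionState with an authorization provider configured and the older getCurrentDatabase() API used in the excerpt:

    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.plan.HiveOperation;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class AuthorizeCreateTableExample {
      public static void main(String[] args) throws Exception {
        SessionState ss = SessionState.get();  // assumes SessionState.start(...) has already run
        Hive db = Hive.get();

        // Creating a table requires the output privileges of CREATETABLE on the target database.
        ss.getAuthorizer().authorize(
            db.getDatabase(db.getCurrentDatabase()),
            null,                              // no read privileges required
            HiveOperation.CREATETABLE.getOutputRequiredPrivileges());
      }
    }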

Examples of org.apache.hadoop.hive.ql.metadata.Hive

                               String.valueOf(System.currentTimeMillis()),
                               "IMPLICIT");

      // Lock the database also
      try {
        Hive db = Hive.get(conf);
        lockObjects.add(new HiveLockObj(
                                        new HiveLockObject(db.getCurrentDatabase(), lockData),
                                        HiveLockMode.SHARED));
      } catch (HiveException e) {
        throw new SemanticException(e.getMessage());
      }
View Full Code Here
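
Alongside the per-table and per-partition locks, the lock manager adds an implicit SHARED lock on the current database so that, for example, a concurrent DROP DATABASE cannot slip underneath a running query. The database name it locks comes straight from the session, as in this small sketch (assumes the older API where Hive itself exposes getCurrentDatabase()):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;

    public class CurrentDatabaseExample {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        Hive db = Hive.get(conf);
        // The name used for the database-level lock object in the snippet above.
        System.out.println("Current database: " + db.getCurrentDatabase());
      }
    }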

Examples of org.apache.hadoop.hive.ql.metadata.Hive

    }

    FsPermission perms = FsPermission.valueOf(permsStr);

    if(!tblName.isEmpty()){
      Hive db = null;
      try{
        db = Hive.get();
        Table tbl =  db.getTable(tblName);
        Path tblPath = tbl.getPath();

        FileSystem fs = tblPath.getFileSystem(conf);
        if(null != perms){
          fs.setPermission(tblPath, perms);
        }
        if(null != grp){
          fs.setOwner(tblPath, null, grp);
        }
        return 0;

      } catch (Exception e){
          ss.err.println(String.format("Failed to set permissions/groups on TABLE: <%s> %s",tblName,e.getMessage()));
          try { // We need to drop the table.
            if(null != db){ db.dropTable(tblName); }
          } catch (HiveException he) {
            ss.err.println(String.format("Failed to drop TABLE <%s> after failing to set permissions/groups on it. %s",tblName,e.getMessage()));
          }
          return 1;
      }
    }
    else{
      // looks like a db operation
      if (dbName.isEmpty() || dbName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)){
        // We don't set permissions or groups for the default database's directory.
        return 0;
      }
      else{
        try{
          Hive db = Hive.get();
          Path dbPath = new Warehouse(conf).getDatabasePath(db.getDatabase(dbName));
          FileSystem fs = dbPath.getFileSystem(conf);
          if(perms != null){
            fs.setPermission(dbPath, perms);
          }
          if(null != grp){
View Full Code Here
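
This CLI helper applies a requested permission mask and group to a table's directory (and, in the else branch, to a non-default database's directory), dropping the freshly created table again if the filesystem calls fail. The filesystem half of that, taken in isolation, looks like the sketch below; the table name, permission string, and group are placeholders:

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class TablePermsExample {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        Hive db = Hive.get(conf);
        Table tbl = db.getTable("my_table");   // hypothetical table

        Path tblPath = tbl.getPath();
        FileSystem fs = tblPath.getFileSystem(conf);

        // rwxrwx--- on the table directory; group ownership handed to "hadoop".
        fs.setPermission(tblPath, FsPermission.valueOf("drwxrwx---"));
        fs.setOwner(tblPath, null, "hadoop");  // null user means: leave the owner unchanged
      }
    }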

Examples of org.apache.hadoop.hive.ql.metadata.Hive

  private static HiveLockObject getLockObject(HiveConf conf, String path,
    HiveLockMode mode, HiveLockObjectData data,
    String parent, boolean verifyTablePartition)
      throws LockException {
    try {
      Hive db = Hive.get(conf);
      int indx = path.lastIndexOf("LOCK-" + mode.toString());
      String objName = path.substring(("/" + parent + "/").length(), indx-1);
      String[] names = objName.split("/");

      if (names.length < 2) {
        return null;
      }

      if (!verifyTablePartition) {
        return new HiveLockObject(names, data);
      }

      // do not throw exception if table does not exist
      Table tab = db.getTable(names[0], names[1], false);
      if (tab == null) {
        return null;
      }

      if (names.length == 2) {
        return new HiveLockObject(tab, data);
      }

      Map<String, String> partSpec = new HashMap<String, String>();
      for (indx = 2; indx < names.length; indx++) {
        String[] partVals = names[indx].split("=");
        partSpec.put(partVals[0], partVals[1]);
      }

      Partition partn;
      try {
        partn = db.getPartition(tab, partSpec, false);
      } catch (HiveException e) {
        partn = null;
      }

      if (partn == null) {
View Full Code Here
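
getLockObject reverses the ZooKeeper path encoding: it strips the LOCK-<mode> suffix, splits the remainder into database/table/partition components, and (when asked to) verifies each against the metastore without throwing if the object has since been dropped. The two tolerant lookups it relies on can be exercised directly; the database, table, and partition values below are invented:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.Partition;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class LockTargetLookupExample {
      public static void main(String[] args) throws Exception {
        Hive db = Hive.get(new HiveConf());

        // Third argument false: return null instead of throwing if the table is gone,
        // which is how the lock manager tolerates concurrently dropped tables.
        Table tab = db.getTable("default", "web_logs", false);
        if (tab == null) {
          System.out.println("Table no longer exists");
          return;
        }

        Map<String, String> partSpec = new HashMap<String, String>();
        partSpec.put("ds", "2018-01-01");
        Partition partn = db.getPartition(tab, partSpec, false);  // false = do not create it
        System.out.println(partn == null ? "Partition missing" : partn.getName());
      }
    }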

Examples of org.apache.hadoop.hive.ql.metadata.Hive

   * @param operator
   * @return partitions used by query.  null if they do not exist in index table
   */
  private Set<Partition> checkPartitionsCoveredByIndex(TableScanOperator tableScan, ParseContext pctx)
    throws HiveException {
    Hive hive = Hive.get(pctx.getConf());

    // make sure each partition exists on the index table
    PrunedPartitionList queryPartitionList = pctx.getOpToPartList().get(tableScan);
    Set<Partition> queryPartitions = queryPartitionList.getConfirmedPartns();

View Full Code Here

Examples of org.apache.hadoop.hive.ql.metadata.Hive

  @Override
  public int execute(DriverContext driverContext) {

    // Create the db
    Hive db;
    try {
      db = Hive.get(conf);

      CreateDatabaseDesc createDatabaseDesc = work.getCreateDatabaseDesc();
      if (null != createDatabaseDesc) {
View Full Code Here
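
DDLTask.execute obtains its Hive handle the same way and then branches on whichever DDL descriptor the work object carries; the create-database branch shown here eventually reduces to a createDatabase call like the one sketched below (the database name, comment, and the null location/parameters are illustrative):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.ql.metadata.Hive;

    public class CreateDatabaseExample {
      public static void main(String[] args) throws Exception {
        Hive db = Hive.get(new HiveConf());

        // Thrift Database object: name, comment, location (null = warehouse default), parameters.
        Database database = new Database("analytics", "example database", null, null);
        db.createDatabase(database, true);     // true = ignore "already exists"
      }
    }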
