Package org.apache.hadoop.hive.ql.metadata

Examples of org.apache.hadoop.hive.ql.metadata.HiveException


    }
    for(int i=0; i<aggs.length; i++) {
      try {
        a.add(aggregationsEvaluateMethods[i].invoke(aggs[i]));
      } catch (Exception e) {
        throw new HiveException("Unable to execute UDAF function " + aggregationsEvaluateMethods[i]
            + " on object (" + aggs[i] + "): " + e.getMessage(), e);
      }
    }
    forward(a, outputObjectInspector);
  }
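This first fragment calls each UDAF's evaluate method through reflection and converts any reflection failure into a HiveException carrying a descriptive message. A minimal sketch of the same pattern, assuming only that hive-exec is on the classpath; MyAggregation and callEvaluate are hypothetical names, not Hive API:

import java.lang.reflect.Method;

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class ReflectiveEvaluateSketch {

  // Hypothetical aggregation buffer standing in for a real UDAF evaluator.
  public static class MyAggregation {
    long sum = 42L;
    public Long evaluate() { return sum; }
  }

  // Invoke evaluate() reflectively; wrap any failure, keeping the cause.
  public static Object callEvaluate(Object agg) throws HiveException {
    try {
      Method evaluate = agg.getClass().getMethod("evaluate");
      return evaluate.invoke(agg);
    } catch (Exception e) {
      throw new HiveException(
          "Unable to execute UDAF evaluate on object (" + agg + ")", e);
    }
  }

  public static void main(String[] args) throws HiveException {
    System.out.println(callEvaluate(new MyAggregation())); // prints 42
  }
}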


            // Just do nothing here.
          }
        }
      } catch (Exception e) {
        // No separate printStackTrace(): the HiveException carries the cause.
        throw new HiveException(e);
      }
    }
    super.close(abort);
  }
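The second fragment wraps anything thrown during close() in a HiveException before delegating to super.close(abort). A sketch of the wrap-on-close idiom around a hypothetical Closeable resource; the class and field names are illustrative:

import java.io.Closeable;

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class CloseSketch {
  private final Closeable resource;

  public CloseSketch(Closeable resource) {
    this.resource = resource;
  }

  // Normalize any cleanup failure to HiveException so callers handle one type.
  public void close(boolean abort) throws HiveException {
    try {
      resource.close();
    } catch (Exception e) {
      throw new HiveException(e); // the original exception rides along as the cause
    }
  }
}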

        try {
          Class<?> sdclass = td.getDeserializerClass();
          if(sdclass == null) {
            String className = td.getSerdeClassName();
            if (className == null || className.isEmpty()) {
              throw new HiveException("SerDe class or the SerDe class name is not set for table: " + td.getProperties().getProperty("name"));
            }
            sdclass = MapOperator.class.getClassLoader().loadClass(className);
          }
          deserializer = (Deserializer) sdclass.newInstance();
          deserializer.initialize(hconf, p);
          rowObjectInspector = (StructObjectInspector)deserializer.getObjectInspector();
         
          // Next check if this table has partitions and if so
          // get the list of partition names as well as allocate
          // the serdes for the partition columns
          String pcols = p.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
          if (pcols != null && pcols.length() > 0) {
            partNames = new ArrayList<String>();
            partValues = new ArrayList<String>();
            partObjectInspectors = new ArrayList<ObjectInspector>();
            String[] partKeys = pcols.trim().split("/");
            for(String key: partKeys) {
              partNames.add(key);
              partValues.add(partSpec.get(key));
              partObjectInspectors.add(
                  ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
            }
            StructObjectInspector partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
           
            rowWithPart = new Object[2];
            rowWithPart[1] = partValues;
            rowObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(
                Arrays.asList(new StructObjectInspector[]{
                    rowObjectInspector,
                    partObjectInspector}));
          }
          else {
            partNames = null;
            partValues = null;
          }

          LOG.info("Got partitions: " + pcols);
        } catch (SerDeException e) {
          throw new HiveException(e);
        } catch (InstantiationException e) {
          throw new HiveException(e);
        } catch (IllegalAccessException e) {
          throw new HiveException(e);
        } catch (ClassNotFoundException e) {
          throw new HiveException(e);
        }
      }
    }

    if (todo.isEmpty()) {
      // No alias matched the input file path in the configuration -
      // a serious inconsistency between the plan and the input split.
      LOG.error("Configuration does not have any alias for path: " + fpath.toUri().getPath());
      throw new HiveException("Configuration and input path are inconsistent");
    }

    // we found all the operators that we are supposed to process. now bootstrap
    this.setChildOperators(todo);
    // the child operators may need the global mr configuration. set it now so
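The MapOperator fragment above loads a deserializer class by name and funnels four checked exceptions into HiveException. A condensed sketch of the class-loading half, assuming Java 7+ so a single ReflectiveOperationException catch can replace the separate blocks; SerDeLoaderSketch and loadInstance are hypothetical names:

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class SerDeLoaderSketch {

  // Load and instantiate a class by name, rejecting unset names up front
  // and wrapping every reflective failure mode in one HiveException.
  public static Object loadInstance(String className) throws HiveException {
    if (className == null || className.isEmpty()) {
      throw new HiveException("SerDe class name is not set");
    }
    try {
      Class<?> clazz = SerDeLoaderSketch.class.getClassLoader().loadClass(className);
      return clazz.getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
      // Covers ClassNotFoundException, InstantiationException,
      // IllegalAccessException, NoSuchMethodException, InvocationTargetException.
      throw new HiveException(e);
    }
  }
}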

        forward(rowWithPart, rowObjectInspector);
      }
    } catch (SerDeException e) {
      // TODO: policy on deserialization errors
      deserialize_error_count.set(deserialize_error_count.get()+1);
      throw new HiveException(e);
    }
  }
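This fragment increments a deserialization error counter before rethrowing, so operator statistics record the failure even though processing stops. A sketch of the count-then-wrap pattern, with an AtomicLong standing in for the operator's LongWritable counter and parse() as a hypothetical deserialization step:

import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class DeserializeCountSketch {

  private static final AtomicLong DESERIALIZE_ERROR_COUNT = new AtomicLong();

  public static Object deserialize(byte[] raw) throws HiveException {
    try {
      return parse(raw);
    } catch (Exception e) {
      DESERIALIZE_ERROR_COUNT.incrementAndGet(); // record the failure first
      throw new HiveException(e);
    }
  }

  // Hypothetical deserialization step; a real one would use a SerDe.
  private static Object parse(byte[] raw) throws Exception {
    if (raw == null) {
      throw new Exception("null record");
    }
    return new String(raw, StandardCharsets.UTF_8);
  }
}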

    }
  }

  public void process(Object row, ObjectInspector rowInspector)
      throws HiveException {
    throw new HiveException("Hive 2 Internal error: should not be called!");
  }
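Here HiveException marks a code path that must never run: the operator overrides process() only to fail loudly if it is ever invoked. A short sketch of the guard, with a hypothetical class name:

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class NotCallableSketch {
  // Overridden only to make an impossible call path fail fast and visibly.
  public void process(Object row) throws HiveException {
    throw new HiveException("Internal error: process() should not be called");
  }
}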

  }

  public void evaluate(Object row, ObjectInspector rowInspector,
      InspectableObject result) throws HiveException {
    if (result == null) {
      throw new HiveException("result cannot be null.");
    }
    // Evaluate all children first
    for(int i=0; i<paramEvaluators.length; i++) {
      paramEvaluators[i].evaluate(row, rowInspector, paramInspectableObjects[i]);
      Category c = paramInspectableObjects[i].oi.getCategory();
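The evaluate fragment validates its out-parameter before doing any work, so a bad caller gets a descriptive HiveException instead of a NullPointerException mid-evaluation. A sketch of the precondition check, with ResultHolder as a hypothetical stand-in for InspectableObject:

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class EvaluateSketch {

  // Hypothetical out-parameter, standing in for InspectableObject.
  public static class ResultHolder {
    public Object value;
  }

  public void evaluate(Object row, ResultHolder result) throws HiveException {
    if (result == null) {
      throw new HiveException("result cannot be null.");
    }
    result.value = row; // placeholder for evaluating child expressions
  }
}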

                                   "ErrorProcessor");
      errThread.start();

    } catch (Exception e) {
      throw new HiveException("Cannot initialize ScriptOperator", e);
    }
  }
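Initialization of the script operator starts a stderr-draining thread and wraps any startup failure with a message naming the failing component. A sketch, assuming the drain logic is supplied as a Runnable; only the "ErrorProcessor" thread name comes from the snippet:

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class ScriptInitSketch {

  // Start the stderr drain; any failure is reported as a HiveException
  // that says which component could not be initialized.
  public static Thread startErrorProcessor(Runnable drainStderr) throws HiveException {
    try {
      Thread errThread = new Thread(drainStderr, "ErrorProcessor");
      errThread.start();
      return errThread;
    } catch (Exception e) {
      throw new HiveException("Cannot initialize ScriptOperator", e);
    }
  }
}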

  }

  Text text = new Text();
  public void process(Object row, ObjectInspector rowInspector) throws HiveException {
    if(scriptError != null) {
      throw new HiveException(scriptError);
    }
    try {
      text = (Text) scriptInputSerializer.serialize(row, rowInspector);
      scriptOut.write(text.getBytes(), 0, text.getLength());
      scriptOut.write(Utilities.newLineCode);
    } catch (SerDeException e) {
      LOG.error("Error in serializing the row: " + e.getMessage());
      scriptError = e;
      serialize_error_count.set(serialize_error_count.get() + 1);
      throw new HiveException(e);
    } catch (IOException e) {
      LOG.error("Error in writing to script: " + e.getMessage());
      scriptError = e;
      throw new HiveException(e);
    }
  }
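This process() method remembers the first serialization or I/O failure in scriptError, so every later row fails immediately at the top of the method instead of writing to a broken pipe. A sketch of the remember-then-reject pattern around a plain OutputStream; the SerDe step is elided and the class name is hypothetical:

import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class ScriptWriterSketch {

  private final OutputStream scriptOut;
  private volatile Throwable scriptError; // first failure, remembered

  public ScriptWriterSketch(OutputStream scriptOut) {
    this.scriptOut = scriptOut;
  }

  public void process(byte[] serializedRow) throws HiveException {
    if (scriptError != null) {
      throw new HiveException(scriptError); // reject all rows after the first failure
    }
    try {
      scriptOut.write(serializedRow);
      scriptOut.write('\n');
    } catch (IOException e) {
      scriptError = e; // remember before rethrowing
      throw new HiveException(e);
    }
  }
}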

    } catch (Exception e) {
      // Ignore: best-effort cleanup during close.
    }

    super.close(new_abort);

    if (new_abort && !abort) {
      throw new HiveException("Hit error while closing operator");
    }
  }
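The close() above swallows cleanup exceptions, then throws only if a new error surfaced during close (new_abort) while the task was not already aborting (abort). A compact sketch of that escalation rule; finishClose is a hypothetical helper:

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class AbortAwareCloseSketch {
  // Escalate only when close itself produced a new error; if the task was
  // already aborting, a second exception would just hide the original one.
  public static void finishClose(boolean abort, boolean newAbort) throws HiveException {
    if (newAbort && !abort) {
      throw new HiveException("Hit error while closing operator");
    }
  }
}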

            db.revokeRole(roleName, userName, principal.getType());
          }
        }
      }
    } catch (Exception e) {
      throw new HiveException(e);
    }
    return 0;
  }
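Finally, a DDL-style task: the metastore call (here, revoking a role) is wrapped so any failure becomes a HiveException, and the method returns 0 only on success. A sketch with the metastore operation passed in as a Callable; the revokeRole arguments are elided:

import java.util.concurrent.Callable;

import org.apache.hadoop.hive.ql.metadata.HiveException;

public class DdlTaskSketch {
  // Run a metastore operation; 0 means success, any failure is rethrown
  // as HiveException for the driver to report.
  public static int run(Callable<Void> metastoreCall) throws HiveException {
    try {
      metastoreCall.call();
    } catch (Exception e) {
      throw new HiveException(e);
    }
    return 0;
  }
}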
