Examples of HCatSchema
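
HCatSchema describes the column layout of an HCatalog table as an ordered list of HCatFieldSchema objects; the snippets below are drawn from both the older org.apache.hcatalog and the newer org.apache.hive.hcatalog packages. As a minimal hand-written sketch (not taken from any of the examples below, and using made-up column names), a schema can be built directly from field schemas:

import java.util.ArrayList;
import java.util.List;

import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class HCatSchemaSketch {
  public static HCatSchema buildSchema() throws HCatException {
    List<HCatFieldSchema> columns = new ArrayList<HCatFieldSchema>();
    // Primitive columns take (name, type, comment).
    columns.add(new HCatFieldSchema("key", HCatFieldSchema.Type.INT, "row key"));
    columns.add(new HCatFieldSchema("value", HCatFieldSchema.Type.STRING, "payload"));
    return new HCatSchema(columns);
  }
}

Methods such as getFieldNames(), getPosition(String) and get(String) then give positional or by-name access to the columns, which is the pattern most of the examples below rely on.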


Examples of org.apache.hcatalog.data.schema.HCatSchema

      // else - this means pig's optimizer never invoked the pushProjection
      // method - so we need all fields and hence we should not call the
      // setOutputSchema on HCatInputFormat
      if (HCatUtil.checkJobContextIfRunningFromBackend(job)) {
        try {
          HCatSchema hcatTableSchema = (HCatSchema) udfProps.get(HCatConstants.HCAT_TABLE_SCHEMA);
          outputSchema = hcatTableSchema;
          HCatInputFormat.setOutputSchema(job, outputSchema);
        } catch (Exception e) {
          throw new IOException(e);
        }
      }

Examples of org.apache.hcatalog.data.schema.HCatSchema

      .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);

    Table table = phutil.getTable(location,
      hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job),
      PigHCatUtil.getHCatServerPrincipal(job));
    HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
    try {
      PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema);
    } catch (IOException e) {
      throw new PigException(
        "Table schema incompatible for reading through HCatLoader :" + e.getMessage()
          + ";[Table schema was " + hcatTableSchema.toString() + "]"
        , PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
    storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema);
    outputSchema = hcatTableSchema;
    return PigHCatUtil.getResourceSchema(hcatTableSchema);

Examples of org.apache.hcatalog.data.schema.HCatSchema

      return null;
    }

    // Look up the full table schema cached in the UDF context, then build a
    // pruned schema containing only the fields Pig marked as required.
    Properties props = UDFContext.getUDFContext().getUDFProperties(
      classForUDFCLookup, new String[]{signature});
    HCatSchema hcatTableSchema = (HCatSchema) props.get(HCatConstants.HCAT_TABLE_SCHEMA);

    ArrayList<HCatFieldSchema> fcols = new ArrayList<HCatFieldSchema>();
    for (RequiredField rf : fields) {
      fcols.add(hcatTableSchema.getFields().get(rf.getIndex()));
    }
    return new HCatSchema(fcols);
  }
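
The fragment above rebuilds a pruned HCatSchema from the column indexes Pig reports as required. The same pruning can be expressed by column name; a rough sketch, assuming the imports from the intro sketch and hypothetical column names:

  // Sketch: keep only the named columns of an existing HCatSchema.
  static HCatSchema project(HCatSchema full, String... wanted) throws HCatException {
    List<HCatFieldSchema> projected = new ArrayList<HCatFieldSchema>();
    for (String name : wanted) {
      Integer pos = full.getPosition(name);   // null when the column does not exist
      if (pos != null) {
        projected.add(full.get(pos));
      }
    }
    return new HCatSchema(projected);
  }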

Examples of org.apache.hcatalog.data.schema.HCatSchema

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
      OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
      // Build a three-column HCatRecord and set each field by name against the
      // output schema carried in the serialized OutputJobInfo.
      HCatRecord record = new DefaultHCatRecord(3);
      HCatSchema schema = jobInfo.getOutputSchema();
      String vals[] = value.toString().split(",");
      record.setInteger("key", schema, Integer.parseInt(vals[0]));
      // Remaining columns arrive as name:value pairs; set each one by name.
      for (int i = 1; i < vals.length; i++) {
        String pair[] = vals[i].split(":");
        record.set(pair[0], schema, pair[1]);
      }
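
The map fragment above fills a DefaultHCatRecord by field name against the output schema. Reading values back follows the same pattern; a rough sketch, assuming a record and schema like those above (the accessors throw HCatException):

      // Sketch: schema-aware, by-name access to a populated HCatRecord.
      Integer key = record.getInteger("key", schema);
      Object raw = record.get("value", schema);   // "value" is a hypothetical column name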

Examples of org.apache.hcatalog.data.schema.HCatSchema

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
      OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
      HCatRecord record = new DefaultHCatRecord(3);
      HCatSchema schema = jobInfo.getOutputSchema();
      String vals[] = value.toString().split(",");
      record.setInteger("key", schema, Integer.parseInt(vals[0]));
      // The MapWriteAbortTransaction test records which key it will deliberately
      // fail on, so the aborted write can be verified afterwards.
      synchronized (MapWriteAbortTransaction.class) {
        if (count == 2) {
          failedKey = vals[0];

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

    job.setOutputKeyClass(WritableComparable.class);
    job.setOutputValueClass(DefaultHCatRecord.class);
    job.setNumReduceTasks(0);
    HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
      outputTableName, null));
    // Reuse the input table's schema verbatim as the output schema.
    HCatSchema s = HCatInputFormat.getTableSchema(job);
    System.err.println("INFO: output schema explicitly set for writing:"
      + s);
    HCatOutputFormat.setSchema(job, s);
    job.setOutputFormatClass(HCatOutputFormat.class);
    return (job.waitForCompletion(true) ? 0 : 1);
  }
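
The job setup above copies the input table's schema onto HCatOutputFormat. For context, a rough sketch of the input-side wiring that makes getTableSchema possible; the database and table names are placeholders, and depending on the HCatalog release getTableSchema accepts a Configuration or a JobContext:

    // Sketch: point HCatInputFormat at a table so its schema is available to the job.
    Configuration conf = new Configuration();
    Job job = new Job(conf, "hcat-schema-copy");
    HCatInputFormat.setInput(job, "default", "input_table");   // placeholder db/table
    job.setInputFormatClass(HCatInputFormat.class);
    HCatSchema schema = HCatInputFormat.getTableSchema(job.getConfiguration());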

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

    job.setMapperClass(Map.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DefaultHCatRecord.class);
    HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
      outputTableName, null));
    // As above, the output schema is copied straight from the input table.
    HCatSchema s = HCatInputFormat.getTableSchema(job);
    System.err.println("INFO: output schema explicitly set for writing:"
      + s);
    HCatOutputFormat.setSchema(job, s);
    job.setOutputFormatClass(HCatOutputFormat.class);
    return (job.waitForCompletion(true) ? 0 : 1);
  }

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

        break;
      }
      if (valueToken != JsonToken.START_OBJECT) {
        throw new IOException("Start of Object expected");
      }
      // Struct-typed fields carry their own nested HCatSchema; recurse into it
      // to populate each member of the struct.
      HCatSchema subSchema = hcatFieldSchema.getStructSubSchema();
      int sz = subSchema.getFieldNames().size();

      List<Object> struct = new ArrayList<Object>(Collections.nCopies(sz, null));
      while ((valueToken = p.nextToken()) != JsonToken.END_OBJECT) {
        populateRecord(struct, valueToken, p, subSchema);
      }
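
The JSON parsing fragment above descends into a struct-typed column through its nested schema. That nesting is established when the field schema is built; a small sketch with made-up field names, inside a method that can throw HCatException:

      // Sketch: a STRUCT column carries its own HCatSchema, later retrievable
      // via getStructSubSchema().
      List<HCatFieldSchema> inner = new ArrayList<HCatFieldSchema>();
      inner.add(new HCatFieldSchema("lat", HCatFieldSchema.Type.DOUBLE, null));
      inner.add(new HCatFieldSchema("lon", HCatFieldSchema.Type.DOUBLE, null));
      HCatFieldSchema location = new HCatFieldSchema(
        "location", HCatFieldSchema.Type.STRUCT, new HCatSchema(inner), "made-up struct column");
      HCatSchema sub = location.getStructSubSchema();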

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

      // else - this means pig's optimizer never invoked the pushProjection
      // method - so we need all fields and hence we should not call the
      // setOutputSchema on HCatInputFormat
      if (HCatUtil.checkJobContextIfRunningFromBackend(job)) {
        try {
          HCatSchema hcatTableSchema = (HCatSchema) udfProps.get(HCatConstants.HCAT_TABLE_SCHEMA);
          outputSchema = hcatTableSchema;
          HCatInputFormat.setOutputSchema(job, outputSchema);
        } catch (Exception e) {
          throw new IOException(e);
        }
      }

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

      .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);

    Table table = phutil.getTable(location,
      hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job),
      PigHCatUtil.getHCatServerPrincipal(job));
    HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
    try {
      PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema);
    } catch (IOException e) {
      throw new PigException(
        "Table schema incompatible for reading through HCatLoader :" + e.getMessage()
          + ";[Table schema was " + hcatTableSchema.toString() + "]"
        , PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
    storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema);
    outputSchema = hcatTableSchema;
    return PigHCatUtil.getResourceSchema(hcatTableSchema);