Examples of HCatSchema


Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(WritableComparable.class);
    job.setOutputValueClass(DefaultHCatRecord.class);
    HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
      outputTableName, null));
    HCatSchema s = HCatOutputFormat.getTableSchema(job);
    System.err.println("INFO: output schema explicitly set for writing:"
      + s);
    HCatOutputFormat.setSchema(job, s);
    job.setOutputFormatClass(HCatOutputFormat.class);
    return (job.waitForCompletion(true) ? 0 : 1);
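The HCatSchema returned by getTableSchema exposes the field list, field names, and positions directly. A minimal sketch (the SchemaDump helper is hypothetical and not part of the example above) that dumps each field's position, name, and type string:

    import org.apache.hive.hcatalog.common.HCatException;
    import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;

    public class SchemaDump {
      // Prints each field's position, name, and type string from an HCatSchema,
      // e.g. the schema returned by HCatOutputFormat.getTableSchema above.
      public static void dump(HCatSchema schema) throws HCatException {
        for (HCatFieldSchema field : schema.getFields()) {
          System.err.println(schema.getPosition(field.getName())
              + "\t" + field.getName()
              + "\t" + field.getTypeString());
        }
      }
    }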

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

        fieldSchemas.add(getHCatFSFromPigFS(fSchema, hcatFieldSchema));
      } catch (HCatException he) {
        throw new FrontendException(he.getMessage(), PigHCatUtil.PIG_EXCEPTION_CODE, he);
      }
    }
    return new HCatSchema(fieldSchemas);
  }

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

        field = bagSchema.getField(0).schema.getField(0);
      } else {
        field = bagSchema.getField(0);
      }
      arrFields.add(getHCatFSFromPigFS(field, hcatFieldSchema == null ? null : hcatFieldSchema.getArrayElementSchema().get(0)));
      return new HCatFieldSchema(fSchema.alias, Type.ARRAY, new HCatSchema(arrFields), "");

    case DataType.TUPLE:
      List<String> fieldNames = new ArrayList<String>();
      List<HCatFieldSchema> hcatFSs = new ArrayList<HCatFieldSchema>();
      HCatSchema structSubSchema = hcatFieldSchema == null ? null : hcatFieldSchema.getStructSubSchema();
      List<FieldSchema> fields = fSchema.schema.getFields();
      for (int i = 0; i < fields.size(); i++) {
        FieldSchema fieldSchema = fields.get(i);
        fieldNames.add(fieldSchema.alias);
        hcatFSs.add(getHCatFSFromPigFS(fieldSchema, structSubSchema == null ? null : structSubSchema.get(i)));
      }
      return new HCatFieldSchema(fSchema.alias, Type.STRUCT, new HCatSchema(hcatFSs), "");

    case DataType.MAP: {
      // Pig's schema contains no type information about a map's keys and
      // values. So, if it is a new column, assume map<string,string>; if it is
      // an existing column, return whatever the existing column contains.

      HCatFieldSchema valFS;
      List<HCatFieldSchema> valFSList = new ArrayList<HCatFieldSchema>(1);

      if (hcatFieldSchema != null) {
        return new HCatFieldSchema(fSchema.alias, Type.MAP, Type.STRING, hcatFieldSchema.getMapValueSchema(), "");
      }

      // Column not found in the target table: it is a new column whose schema is map<string,string>.
      valFS = new HCatFieldSchema(fSchema.alias, Type.STRING, "");
      valFSList.add(valFS);
      return new HCatFieldSchema(fSchema.alias, Type.MAP, Type.STRING, new HCatSchema(valFSList), "");
    }

    default:
      throw new FrontendException("Unsupported type: " + type + "  in Pig's schema", PigHCatUtil.PIG_EXCEPTION_CODE);
    }
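For comparison, the MAP branch for a new column can be reproduced in isolation. A minimal, self-contained sketch; the MapColumnExample class and its columnName parameter are illustrative and not part of the excerpt:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hive.hcatalog.common.HCatException;
    import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
    import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;

    public class MapColumnExample {
      // Builds the map<string,string> field schema that the MAP case produces
      // for a column that does not yet exist in the target table.
      public static HCatFieldSchema newMapColumn(String columnName) throws HCatException {
        List<HCatFieldSchema> valueFields = new ArrayList<HCatFieldSchema>(1);
        valueFields.add(new HCatFieldSchema(columnName, Type.STRING, ""));
        return new HCatFieldSchema(columnName, Type.MAP, Type.STRING,
            new HCatSchema(valueFields), "");
      }
    }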

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

      case STRUCT:
        if (pigObj == null) {
          return null;
        }
        HCatSchema structSubSchema = hcatFS.getStructSubSchema();
        // Unwrap the tuple.
        List<Object> all = ((Tuple) pigObj).getAll();
        ArrayList<Object> converted = new ArrayList<Object>(all.size());
        for (int i = 0; i < all.size(); i++) {
          converted.add(getJavaObj(all.get(i), structSubSchema.get(i)));
        }
        return converted;

      case ARRAY:
        if (pigObj == null) {

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

          // Map values can be primitive or complex
        }
        break;

      case DataType.BAG:
        HCatSchema arrayElementSchema = hcatField == null ? null : hcatField.getArrayElementSchema();
        for (FieldSchema innerField : pigField.schema.getField(0).schema.getFields()) {
          validateSchema(innerField, getColFromSchema(pigField.alias, arrayElementSchema));
        }
        break;

      case DataType.TUPLE:
        HCatSchema structSubSchema = hcatField == null ? null : hcatField.getStructSubSchema();
        for (FieldSchema innerField : pigField.schema.getFields()) {
          validateSchema(innerField, getColFromSchema(pigField.alias, structSubSchema));
        }
        break;

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

      }
      Job job = new Job(conf, "typedatacheck");
      // initialize HCatInputFormat
      HCatInputFormat.setInput(job, InputJobInfo.create(
        dbName, tableName, null));
      HCatSchema s = HCatInputFormat.getTableSchema(job);
      job.getConfiguration().set(SCHEMA_KEY, schemaStr);
      job.getConfiguration().set(DELIM, outputdelim);
      job.setInputFormatClass(HCatInputFormat.class);
      job.setOutputFormatClass(TextOutputFormat.class);
      job.setJarByClass(TypeDataCheck.class);
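On the read side, the same table schema lets a mapper pull columns out of each HCatRecord by name. A minimal sketch, not part of the original job: the ReadMapper class and the "state" column are hypothetical, and it assumes the getTableSchema(Configuration) overload, which varies between HCatalog releases:

    import java.io.IOException;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hive.hcatalog.data.HCatRecord;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

    public class ReadMapper
        extends Mapper<WritableComparable, HCatRecord, Text, IntWritable> {

      private HCatSchema schema;

      @Override
      protected void setup(Context context) throws IOException, InterruptedException {
        // Assumes HCatInputFormat.getTableSchema(Configuration) is available.
        schema = HCatInputFormat.getTableSchema(context.getConfiguration());
      }

      @Override
      protected void map(WritableComparable key, HCatRecord value, Context context)
          throws IOException, InterruptedException {
        // Look up the hypothetical "state" column by name via the schema.
        String state = (String) value.get("state", schema);
        context.write(new Text(state), new IntWritable(1));
      }
    }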

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

      return result;
    }
  }

  public static HCatSchema extractSchema(Table table) throws HCatException {
    return new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));
  }

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

  public static HCatSchema extractSchema(Table table) throws HCatException {
    return new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));
  }

  public static HCatSchema extractSchema(Partition partition) throws HCatException {
    return new HCatSchema(HCatUtil.getHCatFieldSchemaList(partition.getCols()));
  }

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

    throws NoSuchObjectException, TException, MetaException {
    return new Table(client.getTable(dbName, tableName));
  }

  public static HCatSchema getTableSchemaWithPtnCols(Table table) throws IOException {
    HCatSchema tableSchema = new HCatSchema(HCatUtil.getHCatFieldSchemaList(table.getCols()));

    if (table.getPartitionKeys().size() != 0) {

      // Add partition keys to the table schema.
      // NOTE: this assumes that partition keys never also appear as regular
      // columns inside the table schema!
      for (FieldSchema fs : table.getPartitionKeys()) {
        tableSchema.append(HCatSchemaUtils.getHCatFieldSchema(fs));
      }
    }
    return tableSchema;
  }

Examples of org.apache.hive.hcatalog.data.schema.HCatSchema

   * @param table the instance to extract partition columns from
   * @return HCatSchema instance which contains the partition columns
   * @throws IOException
   */
  public static HCatSchema getPartitionColumns(Table table) throws IOException {
    HCatSchema cols = new HCatSchema(new LinkedList<HCatFieldSchema>());
    if (table.getPartitionKeys().size() != 0) {
      for (FieldSchema fs : table.getPartitionKeys()) {
        cols.append(HCatSchemaUtils.getHCatFieldSchema(fs));
      }
    }
    return cols;
  }