Examples of OutputJobInfo


Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    public static class MapHCatWrite extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // HCatOutputFormat serialized the OutputJobInfo into the job
            // configuration; deserialize it to recover the output schema
            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            // input lines look like "<key>,<column>:<value>,<column>:<value>"
            String[] vals = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            for (int i = 1; i < vals.length; i++) {
                String[] pair = vals[i].split(":");
                record.set(pair[0], schema, pair[1]);
            }
            // the snippet was truncated here; emitting the completed record
            // (HCatOutputFormat ignores the key) is the natural ending
            context.write(null, record);
        }
    }
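
For context, the OutputJobInfo that this mapper deserializes is placed into the job configuration on the client side. The sketch below follows the driver pattern from the HCatalog input/output documentation; the database and table names are placeholders, not taken from the original test.

    // Hedged driver sketch: "default" and "demo_table" are placeholder names.
    Job job = new Job(new Configuration(), "hcat-write-example");
    job.setJarByClass(MapHCatWrite.class);
    job.setMapperClass(MapHCatWrite.class);
    job.setOutputFormatClass(HCatOutputFormat.class);
    job.setOutputKeyClass(BytesWritable.class);
    job.setOutputValueClass(DefaultHCatRecord.class);
    // setOutput() serializes the OutputJobInfo into the configuration under
    // HCatConstants.HCAT_KEY_OUTPUT_INFO -- the key the mapper reads back.
    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", "demo_table", null));
    // register the table schema; this is what jobInfo.getOutputSchema() returns
    HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));
    job.waitForCompletion(true);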

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    static class MapWriteAbortTransaction extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            String[] vals = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            // deliberately fail on one input row so the test can verify that
            // the write transaction is aborted rather than committed
            if (vals[0].equals("3")) {
                throw new IOException("Failing map to test abort");
            }
            // truncated in the original; the remaining columns are filled in
            // and the record emitted just as in MapHCatWrite above
            for (int i = 1; i < vals.length; i++) {
                String[] pair = vals[i].split(":");
                record.set(pair[0], schema, pair[1]);
            }
            context.write(null, record);
        }
    }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

        // Populate the revision-manager (RM) transaction in OutputJobInfo;
        // in bulk mode, also populate the intermediate output location.
        Map<String, String> tableJobProperties = tableDesc.getJobProperties();
        String jobString = tableJobProperties.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        try {
            OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(jobString);
            HCatTableInfo tableInfo = outputJobInfo.getTableInfo();
            String qualifiedTableName = HBaseHCatStorageHandler.getFullyQualifiedHBaseTableName(tableInfo);
            jobProperties.put(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY, qualifiedTableName);
            jobProperties.put(TableOutputFormat.OUTPUT_TABLE, qualifiedTableName);

            Configuration jobConf = getJobConf();
            addHbaseResources(jobConf, jobProperties);

            Configuration copyOfConf = new Configuration(jobConf);
            HBaseConfiguration.addHbaseResources(copyOfConf);

            // Reuse an existing write transaction if one is already recorded
            // in the OutputJobInfo properties; otherwise begin a new one and
            // serialize it back so downstream code (e.g. getWriteTransaction
            // in a later snippet) can pick it up.
            String txnString = outputJobInfo.getProperties().getProperty(
                    HBaseConstants.PROPERTY_WRITE_TXN_KEY);
            Transaction txn;
            if (txnString == null) {
                txn = HBaseRevisionManagerUtil.beginWriteTransaction(qualifiedTableName, tableInfo, copyOfConf);
                String serializedTxn = HCatUtil.serialize(txn);
                outputJobInfo.getProperties().setProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY,
                        serializedTxn);
            } else {
                txn = (Transaction) HCatUtil.deserialize(txnString);
            }
            if (isBulkMode(outputJobInfo)) {
                String tableLocation = tableInfo.getTableLocation();
                String location = new Path(tableLocation, "REVISION_" + txn.getRevisionNumber())
                        .toString();
                outputJobInfo.getProperties().setProperty(PROPERTY_INT_OUTPUT_LOCATION, location);
                // We are writing out an intermediate sequence file, hence the
                // location is not passed in OutputJobInfo.getLocation().
                // TODO replace this with a mapreduce constant when available
                jobProperties.put("mapred.output.dir", location);
                jobProperties.put("mapred.output.committer.class", HBaseBulkOutputCommitter.class.getName());
            }
            // truncated in the original: re-serializing the updated
            // OutputJobInfo (now carrying the transaction) into the job
            // properties is the assumed completion
            jobProperties.put(HCatConstants.HCAT_KEY_OUTPUT_INFO, HCatUtil.serialize(outputJobInfo));
        } catch (IOException e) {
            throw new IllegalStateException("Error while configuring job properties", e);
        }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

  public void setStoreLocation(String location, Job job) throws IOException {
    job.getConfiguration().set(INNER_SIGNATURE, INNER_SIGNATURE_PREFIX + "_" + sign);
    Properties p = UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[]{sign});

    // the Pig store location has the form "[db.]table"
    String[] userStr = location.split("\\.");
    OutputJobInfo outputJobInfo;

    String outInfoString = p.getProperty(HCatConstants.HCAT_KEY_OUTPUT_INFO);
    if (outInfoString != null) {
      // an earlier call already built and cached the OutputJobInfo; reuse it
      outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(outInfoString);
    } else {
      // truncated in the original: a fresh OutputJobInfo is created from the
      // parsed location, as in the org.apache.hive.hcatalog version of this
      // method shown in a later snippet
      outputJobInfo = userStr.length == 2
        ? OutputJobInfo.create(userStr[0], userStr[1], partitions)
        : OutputJobInfo.create(null, userStr[0], partitions);
    }
    // ... (remainder of the method elided in the original)

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    @Override
    public OutputCommitter getOutputCommitter(TaskAttemptContext ctx)
            throws IOException, InterruptedException {
        // recover the OutputJobInfo to get the output schema and location
        // for the underlying StoreFunc-based committer
        String serializedJobInfo = ctx.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(serializedJobInfo);
        ResourceSchema rs = PigHCatUtil.getResourceSchema(outputJobInfo.getOutputSchema());
        String location = outputJobInfo.getLocation();
        OutputFormat<BytesWritable, Tuple> outputFormat = storeFunc.getOutputFormat();
        return new StoreFuncBasedOutputCommitter(storeFunc, outputFormat.getOutputCommitter(ctx), location, rs);
    }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    @Override
    public RecordWriter<BytesWritable, Tuple> getRecordWriter(
            TaskAttemptContext ctx) throws IOException, InterruptedException {
        RecordWriter<BytesWritable, Tuple> writer = storeFunc.getOutputFormat().getRecordWriter(ctx);
        String serializedJobInfo = ctx.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(serializedJobInfo);
        ResourceSchema rs = PigHCatUtil.getResourceSchema(outputJobInfo.getOutputSchema());
        String location = outputJobInfo.getLocation();
        // wrap the StoreFunc's writer so records land at the location
        // recorded in the OutputJobInfo
        return new StoreFuncBasedRecordWriter(writer, storeFunc, location, rs);
    }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    private OutputFormat<WritableComparable<?>, Put> getOutputFormat(JobConf job)
            throws IOException {
        String outputInfo = job.get(HCatConstants.HCAT_KEY_OUTPUT_INFO);
        OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(outputInfo);
        // choose between bulk-load and direct HBase writes based on the
        // OutputJobInfo carried in the job configuration
        OutputFormat<WritableComparable<?>, Put> outputFormat;
        if (HBaseHCatStorageHandler.isBulkMode(outputJobInfo)) {
            outputFormat = new HBaseBulkOutputFormat();
        } else {
            outputFormat = new HBaseDirectOutputFormat();
        }
        // truncated in the original; returning the chosen format is the
        // natural completion
        return outputFormat;
    }

Examples of org.apache.hcatalog.mapreduce.OutputJobInfo

    static Transaction getWriteTransaction(Configuration conf) throws IOException {
        // the write transaction travels inside the serialized OutputJobInfo,
        // under the PROPERTY_WRITE_TXN_KEY property set by the storage handler
        OutputJobInfo outputJobInfo = (OutputJobInfo) HCatUtil.deserialize(conf.get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
        return (Transaction) HCatUtil.deserialize(outputJobInfo.getProperties()
                                                               .getProperty(HBaseConstants.PROPERTY_WRITE_TXN_KEY));
    }

Examples of org.apache.hive.hcatalog.mapreduce.OutputJobInfo

      // (continuation of HCatStorer.setStoreLocation(); the snippet begins
      // mid-method, inside the branch that restores cached credentials)
      if (crd != null) {
        job.getCredentials().addAll(crd);
      }
    } else {
      Job clone = new Job(job.getConfiguration());
      OutputJobInfo outputJobInfo;
      if (userStr.length == 2) {
        outputJobInfo = OutputJobInfo.create(userStr[0], userStr[1], partitions);
      } else if (userStr.length == 1) {
        outputJobInfo = OutputJobInfo.create(null, userStr[0], partitions);
      } else {
        throw new FrontendException("location " + location
          + " is invalid. It must be of the form [db.]table",
          PigHCatUtil.PIG_EXCEPTION_CODE);
      }
      Schema schema = (Schema) ObjectSerializer.deserialize(udfProps.getProperty(PIG_SCHEMA));
      if (schema != null) {
        pigSchema = schema;
      }
      if (pigSchema == null) {
        throw new FrontendException(
          "Schema for data cannot be determined.",
          PigHCatUtil.PIG_EXCEPTION_CODE);
      }
      String externalLocation = (String) udfProps.getProperty(HCatConstants.HCAT_PIG_STORER_EXTERNAL_LOCATION);
      if (externalLocation != null) {
        outputJobInfo.setLocation(externalLocation);
      }
      try {
        HCatOutputFormat.setOutput(job, outputJobInfo);
      } catch (HCatException he) {
        // pass the message to the user - essentially something about the
        // table information passed to HCatOutputFormat was not right;
        // truncated in the original, rethrowing as a PigException is the
        // assumed completion
        throw new PigException(he.getMessage(), PigHCatUtil.PIG_EXCEPTION_CODE, he);
      }
      // ... (remainder of the method elided in the original)

Examples of org.apache.hive.hcatalog.mapreduce.OutputJobInfo


  @Override
  public WriterContext prepareWrite() throws HCatException {
    // build an OutputJobInfo from the WriteEntity (database, table, and
    // partition key-values) and hand it to HCatOutputFormat
    OutputJobInfo jobInfo = OutputJobInfo.create(we.getDbName(),
      we.getTableName(), we.getPartitionKVs());
    Job job;
    try {
      job = new Job(conf);
      HCatOutputFormat.setOutput(job, jobInfo);
    } catch (IOException e) {
      // truncated in the original; wrapping the failure in an HCatException
      // is the assumed completion
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
    // ... (remainder of the method elided in the original)
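
For context, prepareWrite() is the master-side entry point of HCatalog's data-transfer (reader/writer) API. Below is a hedged usage sketch following the documented pattern; "demo_table" and the empty config map are placeholders.

    Map<String, String> config = new HashMap<String, String>();
    WriteEntity entity = new WriteEntity.Builder().withTable("demo_table").build();
    // prepareWrite() builds the OutputJobInfo and calls
    // HCatOutputFormat.setOutput(), as in the snippet above
    HCatWriter masterWriter = DataTransferFactory.getHCatWriter(entity, config);
    WriterContext context = masterWriter.prepareWrite();
    // the WriterContext is then shipped to each slave, which writes through
    // DataTransferFactory.getHCatWriter(context).write(recordIterator);
    // the master finally calls masterWriter.commit(context)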