Package org.apache.hcatalog.data

Examples of org.apache.hcatalog.data.HCatRecord
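HCatRecord is HCatalog's record abstraction: an ordered list of column values that can be accessed positionally or, when an HCatSchema is supplied, by field name. DefaultHCatRecord is the concrete implementation used in the snippets below, which show records being built and emitted from MapReduce tasks on the write path and converted into tuples and Giraph vertices on the read path. Several excerpts are truncated, as in the original listing.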


  // Mapper that turns each CSV input line ("<key>,<field>:<value>,...") into an
  // HCatRecord using the output schema carried in the job configuration.
  public static class MapHCatWrite extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
      // HCatOutputFormat.setOutput() serializes an OutputJobInfo into the
      // configuration; deserialize it to get at the table's output schema.
      OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
      HCatRecord record = new DefaultHCatRecord(3);
      HCatSchema schema = jobInfo.getOutputSchema();
      String[] vals = value.toString().split(",");
      // With a schema in hand, fields can be set by name rather than position.
      record.setInteger("key", schema, Integer.parseInt(vals[0]));
      for (int i = 1; i < vals.length; i++) {
        String[] pair = vals[i].split(":");
        record.set(pair[0], schema, pair[1]);
      }
      // HCatOutputFormat ignores the output key, so null is written.
      context.write(null, record);
    }
  }


    // Map body from MapWriteAbortTransaction: it deliberately fails on the third
    // record so that the abort path of the HCatalog output committer is exercised.
    private static int count = 0;

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
      OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
      HCatRecord record = new DefaultHCatRecord(3);
      HCatSchema schema = jobInfo.getOutputSchema();
      String[] vals = value.toString().split(",");
      record.setInteger("key", schema, Integer.parseInt(vals[0]));
      synchronized (MapWriteAbortTransaction.class) {
        if (count == 2) {
          // Remember which key never made it into the table, then fail the task.
          failedKey = vals[0];
          throw new IOException("Failing map to test abort");
        }
        for (int i = 1; i < vals.length; i++) {
          String[] pair = vals[i].split(":");
          record.set(pair[0], schema, pair[1]);
        }
        context.write(null, record);
        count++;
      }
    }

        hCatRecordReader.close();
      }

      @Override
      public ITuple getCurrentKey() throws IOException, InterruptedException {
        HCatRecord record = hCatRecordReader.getCurrentValue();
        // Convert the HCatRecord into a tuple by copying each column positionally;
        // HCatRecord.get(pos) returns the column value as a plain Java object.
        for (int pos = 0; pos < schema.size(); pos++) {
          tuple.set(pos, record.get(pos));
        }
        return tuple;
      }
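The same positional access works outside a wrapper class: a plain MapReduce mapper can consume HCatRecords directly from HCatInputFormat. The sketch below is a minimal illustration, not taken from the listing; the class name, the assumption that column 0 holds an integer, and the driver calls mentioned in the comments are all hypothetical.

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hcatalog.data.HCatRecord;

// Illustrative read-path mapper. The driver is assumed to have called
// HCatInputFormat.setInput(job, InputJobInfo.create(dbName, tableName, filter))
// and job.setInputFormatClass(HCatInputFormat.class). The keys produced by
// HCatInputFormat carry no useful data; the HCatRecord value holds the row.
public class CountByFirstColumn extends Mapper<WritableComparable, HCatRecord, IntWritable, IntWritable> {

  @Override
  protected void map(WritableComparable key, HCatRecord value, Context context)
      throws IOException, InterruptedException {
    // Column 0 of the (hypothetical) table is assumed to be an integer.
    int group = (Integer) value.get(0);
    context.write(new IntWritable(group), new IntWritable(1));
  }
}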

    /** Subclasses extract a vertex's outgoing edges from the record. */
    protected abstract Iterable<Edge<I, E>> getEdges(HCatRecord record);

    @Override
    public final Vertex<I, V, E> getCurrentVertex()
      throws IOException, InterruptedException {
      // Build a Giraph vertex from the current HCatRecord: its id, value and
      // edges are all derived from the record.
      HCatRecord record = getRecordReader().getCurrentValue();
      Vertex<I, V, E> vertex = getConf().createVertex();
      vertex.initialize(getVertexId(record), getVertexValue(record),
          getEdges(record));
      ++recordCount;
      // (info-level progress logging elided in this excerpt)
      return vertex;
    }

    }

    @Override
    public boolean nextVertex() throws IOException, InterruptedException {
      // Multi-row variant: consecutive records that share a vertex id are
      // accumulated into a single vertex's edge list.
      while (getRecordReader().nextKeyValue()) {
        HCatRecord record = getRecordReader().getCurrentValue();
        if (currentVertexId == null) {
          currentVertexId = getVertexId(record);
        }
        if (currentVertexId.equals(getVertexId(record))) {
          currentEdges.add(EdgeFactory.create(getTargetVertexId(record),
              // ...

    /**
     * Create an HCatRecord populated from a vertex.
     * @param vertex the vertex to populate the record from
     * @return the newly created HCatRecord
     */
    protected HCatRecord createRecord(Vertex<I, V, E> vertex) {
      HCatRecord record = new DefaultHCatRecord(getNumColumns());
      fillRecord(record, vertex);
      return record;
    }

    try {
      // Task-side write path: set up the task, stream the HCatRecords through
      // the output format's RecordWriter, then commit the task output if the
      // committer requires it.
      committer = outFormat.getOutputCommitter(cntxt);
      committer.setupTask(cntxt);
      writer = outFormat.getRecordWriter(cntxt);
      while (recordItr.hasNext()) {
        HCatRecord rec = recordItr.next();
        writer.write(null, rec);
      }
      writer.close(cntxt);
      if (committer.needsTaskCommit(cntxt)) {
        committer.commitTask(cntxt);
      }
      // ... (the matching catch block is elided in this excerpt)

        count++;
        sum += Double.parseDouble(gpa.toString());
      }

      // Records can also be populated purely by position, without a schema.
      HCatRecord record = new DefaultHCatRecord(2);
      record.set(0, name);
      record.set(1, Double.toString(sum));

      context.write(null, record);
    }


        // Mapper from a MultiOutputFormat test: records of different widths are
        // built to match the schemas of several target tables, and each record
        // is written to the output configured under tableNames[i].
        private int i = 0;

        @Override
        protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
            HCatRecord record = null;
            String[] splits = value.toString().split(",");
            switch (i) {
            case 0:
                record = new DefaultHCatRecord(2);
                record.set(0, splits[0]);
                record.set(1, splits[1]);
                break;
            case 1:
                record = new DefaultHCatRecord(1);
                record.set(0, splits[0]);
                break;
            case 2:
                record = new DefaultHCatRecord(3);
                record.set(0, splits[0]);
                record.set(1, splits[1]);
                record.set(2, "extra");
                break;
            default:
                Assert.fail("This should not happen!!!!!");
            }
            MultiOutputFormat.write(tableNames[i], null, record, context);
            // ...
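The write-path mappers above all expect the driver to have registered the target table with HCatOutputFormat, which is what places the serialized OutputJobInfo under HCatConstants.HCAT_KEY_OUTPUT_INFO in the first place. A minimal driver sketch, assuming a table in the default database and a mapper like MapHCatWrite from the first example (the class name, job name and argument layout here are placeholders), could look like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.OutputJobInfo;

// Illustrative driver; assumes the MapHCatWrite mapper above is visible here.
public class HCatWriteDriver extends Configured implements Tool {

  @Override
  public int run(String[] args) throws Exception {
    // args[0] = input path, args[1] = target HCatalog table
    Job job = new Job(getConf(), "hcat-write-example");
    job.setJarByClass(HCatWriteDriver.class);

    job.setMapperClass(MapHCatWrite.class);
    job.setNumReduceTasks(0);

    job.setInputFormatClass(TextInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));

    // setOutput() serializes an OutputJobInfo under HCAT_KEY_OUTPUT_INFO in the
    // job configuration; that is what the mappers above deserialize again.
    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", args[1], null));
    HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));
    job.setOutputFormatClass(HCatOutputFormat.class);

    job.setOutputKeyClass(BytesWritable.class);
    job.setOutputValueClass(DefaultHCatRecord.class);

    return job.waitForCompletion(true) ? 0 : 1;
  }

  public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new HCatWriteDriver(), args));
  }
}

With zero reduce tasks, the records flow straight from the mapper into the HCatalog record writer, and the null keys emitted by the mappers are simply discarded.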
