Package org.apache.hive.hcatalog.data

Examples of org.apache.hive.hcatalog.data.DefaultHCatRecord
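DefaultHCatRecord is the list-backed default implementation of HCatRecord, the row type HCatalog uses when MapReduce or Pig jobs read and write Hive tables. The examples on this page either allocate a record by column count and fill it positionally with set(int, Object), or wrap an already-built List<Object>. A minimal standalone sketch of both constructors (the column values are illustrative, not taken from any example below):

  import java.util.Arrays;
  import java.util.List;

  import org.apache.hive.hcatalog.data.DefaultHCatRecord;
  import org.apache.hive.hcatalog.data.HCatRecord;

  public class DefaultHCatRecordSketch {
    public static void main(String[] args) {
      // Size-based constructor: allocate the column count, then fill by position.
      HCatRecord byIndex = new DefaultHCatRecord(3);
      byIndex.set(0, "alice"); // string column
      byIndex.set(1, 30);      // int column
      byIndex.set(2, 3.1d);    // double column

      // List-based constructor: wrap an existing list of column values,
      // as the Pig storer fragment below does with its "outgoing" list.
      List<Object> values = Arrays.asList((Object) "bob", 25, 2.1d);
      HCatRecord fromList = new DefaultHCatRecord(values);

      System.out.println(byIndex.size() + " columns, second value: " + fromList.get(1));
    }
  }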


      age = value.get(1) == null ? null : (Integer) value.get(1);
      gpa = value.get(2) == null ? null : (Double) value.get(2);

      if (gpa != null) gpa = Math.floor(gpa) + 0.1;

      HCatRecord record = new DefaultHCatRecord(5);
      record.set(0, name);
      record.set(1, age);
      record.set(2, gpa);

      context.write(null, record);

    }
View Full Code Here
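The fragment above is the body of a map() method that reads columns from an incoming HCatRecord and emits a new DefaultHCatRecord. A self-contained sketch of how such a mapper is typically declared when the input comes through HCatInputFormat (the class name, key types, and three-column layout are assumptions, not the original source):

  import java.io.IOException;

  import org.apache.hadoop.io.NullWritable;
  import org.apache.hadoop.io.WritableComparable;
  import org.apache.hadoop.mapreduce.Mapper;
  import org.apache.hive.hcatalog.data.DefaultHCatRecord;
  import org.apache.hive.hcatalog.data.HCatRecord;

  public class ReadWriteMapper
      extends Mapper<WritableComparable, HCatRecord, NullWritable, HCatRecord> {

    @Override
    protected void map(WritableComparable key, HCatRecord value, Context context)
        throws IOException, InterruptedException {
      // Columns are read positionally; the null checks guard against missing values.
      String name = value.get(0) == null ? null : (String) value.get(0);
      Integer age = value.get(1) == null ? null : (Integer) value.get(1);
      Double gpa = value.get(2) == null ? null : (Double) value.get(2);

      HCatRecord record = new DefaultHCatRecord(3);
      record.set(0, name);
      record.set(1, age);
      record.set(2, gpa);

      // HCatOutputFormat does not use the map output key.
      context.write(NullWritable.get(), record);
    }
  }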


      Iterator<IntWritable> iter = values.iterator();
      while (iter.hasNext()) {
        sum++;
        iter.next();
      }
      HCatRecord record = new DefaultHCatRecord(2);
      record.set(0, key.get());
      record.set(1, sum);

      context.write(null, record);
    }
View Full Code Here
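The fragment above counts the values received for each key and packs (key, count) into a two-column record. A sketch of the enclosing reducer it implies (the class name and type parameters are inferred from the IntWritable iterator, not copied from the original file):

  import java.io.IOException;
  import java.util.Iterator;

  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.WritableComparable;
  import org.apache.hadoop.mapreduce.Reducer;
  import org.apache.hive.hcatalog.data.DefaultHCatRecord;
  import org.apache.hive.hcatalog.data.HCatRecord;

  public class CountPerKeyReducer
      extends Reducer<IntWritable, IntWritable, WritableComparable, HCatRecord> {

    @Override
    protected void reduce(IntWritable key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
      // Count how many values arrived for this key.
      int sum = 0;
      Iterator<IntWritable> iter = values.iterator();
      while (iter.hasNext()) {
        sum++;
        iter.next();
      }

      // Two-column output record: (key, count).
      HCatRecord record = new DefaultHCatRecord(2);
      record.set(0, key.get());
      record.set(1, sum);
      context.write(null, record);
    }
  }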

    int i = 0;
    for (HCatFieldSchema fSchema : computedSchema.getFields()) {
      outgoing.add(getJavaObj(tuple.get(i++), fSchema));
    }
    try {
      writer.write(null, new DefaultHCatRecord(outgoing));
    } catch (InterruptedException e) {
      throw new BackendException("Error while writing tuple: " + tuple, PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
  }
View Full Code Here

      throws IOException, InterruptedException {
      String name = (String) value.get(0);
      int age = (Integer) value.get(1);
      String ds = (String) value.get(3);

      HCatRecord record = (filter == null ? new DefaultHCatRecord(3) : new DefaultHCatRecord(2));
      record.set(0, name);
      record.set(1, age);
      if (filter == null) record.set(2, ds);

      context.write(null, record);
View Full Code Here

      throws IOException, InterruptedException {
      s = value.get(0) == null ? null : (String) value.get(0);
      i = value.get(1) == null ? null : (Integer) value.get(1);
      d = value.get(2) == null ? null : (Double) value.get(2);

      HCatRecord record = new DefaultHCatRecord(5);
      record.set(0, s);
      record.set(1, i);
      record.set(2, d);

      context.write(null, record);

    }
View Full Code Here

      b = (Long) value.get(3);
      f = (Float) value.get(4);
      d = (Double) value.get(5);
      s = (String) value.get(6);

      HCatRecord record = new DefaultHCatRecord(7);
      record.set(0, t);
      record.set(1, si);
      record.set(2, i);
      record.set(3, b);
      record.set(4, f);
      record.set(5, d);
      record.set(6, s);

      context.write(null, record);

    }
View Full Code Here

      throws IOException, InterruptedException {
      s = value.get(0) == null ? null : (String) value.get(0);
      i = value.get(1) == null ? null : (Integer) value.get(1);
      d = value.get(2) == null ? null : (Double) value.get(2);

      HCatRecord record = new DefaultHCatRecord(3);
      record.set(0, s);
      record.set(1, i);
      record.set(2, d);

      context.write(null, record);

    }
View Full Code Here

      name = (String) value.get(0);
      age = (Integer) value.get(1);
      gpa = (Double) value.get(2);
      gpa = Math.floor(gpa) + 0.1;

      HCatRecord record = new DefaultHCatRecord(3);
      record.set(0, name);
      record.set(1, age);
      record.set(2, gpa);

      context.write(null, record);

    }
View Full Code Here

  public static class Map extends Mapper<LongWritable, Text, NullWritable, DefaultHCatRecord> {

    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
      String[] cols = value.toString().split(",");
      DefaultHCatRecord record = new DefaultHCatRecord(3);
      record.set(0, Integer.parseInt(cols[0]));
      record.set(1, cols[1]);
      record.set(2, cols[2]);
      context.write(NullWritable.get(), record);
    }
View Full Code Here
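The Map class above parses comma-separated text into a three-column DefaultHCatRecord. To land those records in a Hive table it has to be wired to HCatOutputFormat in the job driver; a hedged sketch of that setup follows. TextToHCatMapper stands in for the Map class above (renamed here to avoid clashing with java.util.Map), the database and table names are placeholders, and the exact getTableSchema signature varies slightly between HCatalog releases:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.NullWritable;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
  import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
  import org.apache.hive.hcatalog.data.DefaultHCatRecord;
  import org.apache.hive.hcatalog.data.schema.HCatSchema;
  import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
  import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

  public class WriteToHCatDriver {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      Job job = Job.getInstance(conf, "write DefaultHCatRecords to Hive");
      job.setJarByClass(WriteToHCatDriver.class);

      // Plain text in, HCatalog-managed Hive table out.
      job.setMapperClass(TextToHCatMapper.class);   // the Map class shown above
      job.setInputFormatClass(TextInputFormat.class);
      FileInputFormat.setInputPaths(job, new Path(args[0]));

      job.setOutputFormatClass(HCatOutputFormat.class);
      // Null partition values: unpartitioned table or dynamic partitioning.
      HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", args[1], null));
      HCatSchema schema = HCatOutputFormat.getTableSchema(job.getConfiguration());
      HCatOutputFormat.setSchema(job, schema);

      job.setOutputKeyClass(NullWritable.class);
      job.setOutputValueClass(DefaultHCatRecord.class);
      job.setNumReduceTasks(0);   // map-only write

      System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
  }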

