Package: org.apache.hcatalog.data

Examples of org.apache.hcatalog.data.HCatRecord


    public static class MapHCatWrite extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            OutputJobInfo jobInfo = (OutputJobInfo)HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            String vals[] = value.toString().split(",");
            record.setInteger("key",schema,Integer.parseInt(vals[0]));
            for(int i=1;i<vals.length;i++) {
                String pair[] = vals[i].split(":");
                record.set(pair[0],schema,pair[1]);
            }
            context.write(null,record);
        }
View Full Code Here


    static class MapWriteAbortTransaction extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            OutputJobInfo jobInfo = (OutputJobInfo)HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            String vals[] = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            if (vals[0].equals("3")) {
                throw new IOException("Failing map to test abort");
            }
            for (int i = 1; i < vals.length; i++) {
                String pair[] = vals[i].split(":");
                record.set(pair[0], schema, pair[1]);
            }
            context.write(null, record);
        }
View Full Code Here

    InputStream fInStream = new FileInputStream(f);
    DataInput inpStream = new DataInputStream(fInStream);

    for(int i =0; i < recs.length; i++){
      HCatRecord rec = new DefaultHCatRecord();
      rec.readFields(inpStream);
      Assert.assertTrue(HCatDataCheckUtil.recordsEqual(recs[i],rec));
    }

    Assert.assertEquals(fInStream.available(), 0);
    fInStream.close();
View Full Code Here

    rec_1.add( new Double(5.3D));
    rec_1.add( new String("hcat and hadoop"));
    rec_1.add( null);
    rec_1.add( "null");

    HCatRecord tup_1 = new DefaultHCatRecord(rec_1);

    List<Object> rec_2 = new ArrayList<Object>(8);
    rec_2.add( new Byte("123"));
    rec_2.add( new Short("456"));
    rec_2.add( new Integer(789));
    rec_2.add( new Long(1000L));
    rec_2.add( new Double(5.3D));
    rec_2.add( new String("hcat and hadoop"));
    rec_2.add( null);
    rec_2.add( "null");
    HCatRecord tup_2 = new DefaultHCatRecord(rec_2);

    List<Object> rec_3 = new ArrayList<Object>(10);
    rec_3.add(new Byte("123"));
    rec_3.add(new Short("456"));
    rec_3.add( new Integer(789));
    rec_3.add( new Long(1000L));
    rec_3.add( new Double(5.3D));
    rec_3.add( new String("hcat and hadoop"));
    rec_3.add( null);
    List<Integer> innerList = new ArrayList<Integer>();
    innerList.add(314);
    innerList.add(007);
    rec_3.add( innerList);
    Map<Short, String> map = new HashMap<Short, String>(3);
    map.put(new Short("2"), "hcat is cool");
    map.put(new Short("3"), "is it?");
    map.put(new Short("4"), "or is it not?");
    rec_3.add(map);

    HCatRecord tup_3 = new DefaultHCatRecord(rec_3);

    List<Object> rec_4 = new ArrayList<Object>(8);
    rec_4.add( new Byte("123"));
    rec_4.add( new Short("456"));
    rec_4.add( new Integer(789));
    rec_4.add( new Long(1000L));
    rec_4.add( new Double(5.3D));
    rec_4.add( new String("hcat and hadoop"));
    rec_4.add( null);
    rec_4.add( "null");

    Map<Short, String> map2 = new HashMap<Short, String>(3);
    map2.put(new Short("2"), "hcat is cool");
    map2.put(new Short("3"), "is it?");
    map2.put(new Short("4"), "or is it not?");
    rec_4.add(map2);
    List<Integer> innerList2 = new ArrayList<Integer>();
    innerList2.add(314);
    innerList2.add(007);
    rec_4.add( innerList2);
    HCatRecord tup_4 = new DefaultHCatRecord(rec_4);

    return  new HCatRecord[]{tup_1,tup_2,tup_3,tup_4};

  }
View Full Code Here

     * @see org.apache.hadoop.mapreduce.RecordReader#getCurrentValue()
     */
    @Override
    public HCatRecord getCurrentValue()
    throws IOException, InterruptedException {
      HCatRecord r;

      try {

        // Lazily deserialize the raw value, then project it onto the
        // requested output schema field by field.
        r = new LazyHCatRecord(serde.deserialize(currentValue),serde.getObjectInspector());
        DefaultHCatRecord dr = new DefaultHCatRecord(outputSchema.size());
        int i = 0;
        for (String fieldName : outputSchema.getFieldNames()){
          Integer dataPosn = null;
          // Field exists in the data schema: copy it from the deserialized
          // record; otherwise take it from valuesNotInDataCols (presumably
          // partition-key values — confirm against the reader setup).
          if ((dataPosn = dataSchema.getPosition(fieldName)) != null){
            dr.set(i, r.get(fieldName,dataSchema));
          } else {
            dr.set(i, valuesNotInDataCols.get(fieldName));
          }
          i++;
        }
View Full Code Here

            // Columns 1 and 2 may be null in the input; nulls are preserved.
            age = value.get(1)==null?null:(Integer)value.get(1);
            gpa = value.get(2)==null?null:(Double)value.get(2);

            // Floor the gpa and add 0.1 — presumably to make the output value
            // deterministic for comparison in the test; confirm with the caller.
            if (gpa != null) gpa = Math.floor(gpa) + 0.1;
           
            // NOTE(review): record is sized 5 but only slots 0-2 are set; the
            // remaining columns are left null — verify this matches the schema.
            HCatRecord record = new DefaultHCatRecord(5);
            record.set(0, name);
            record.set(1, age);
            record.set(2, gpa);
           
            context.write(null, record);

        }
View Full Code Here

          // Count the number of values for this key; the IntWritable payloads
          // themselves are not used, only their count.
          Iterator<IntWritable> iter = values.iterator();
          while (iter.hasNext()) {
              sum++;
              iter.next();
          }
          // Emit a two-column record: (key, count). The MapReduce key is null.
          HCatRecord record = new DefaultHCatRecord(2);
          record.set(0, key.get());
          record.set(1, sum);
         
          context.write(null, record);
        }
View Full Code Here

                throws IOException, InterruptedException {
            // NOTE(review): column index 2 is skipped and 3 is read as the
            // partition column ds — confirm against the input table schema.
            String name = (String)value.get(0);
            int age = (Integer)value.get(1);
            String ds = (String)value.get(3);
           
            // With no filter, write 3 columns including ds; with a filter the
            // partition column is implied, so write only 2.
            HCatRecord record = (filter == null ? new DefaultHCatRecord(3) : new DefaultHCatRecord(2));
            record.set(0, name);
            record.set(1, age);
            if (filter == null) record.set(2, ds);
           
            context.write(null, record);

        }
View Full Code Here

        intnum100 = (((Integer) value.get(3)));
        intnum = ((Integer) value.get(4));
        longnum = ((Long) value.get(5));
        floatnum = ((Float) value.get(6));
        doublenum = ((Double) value.get(7));
        HCatRecord output = new DefaultHCatRecord(8);
        output.set(0, intnum1000 + 10);
        if(isnoPartPig)
        {
            output.set(1, ((int)(id + 10)));
        } else {
            output.set(1, ((short)(id + 10)));
        }
        if(isnoPartPig) {
            output.set(2(int)(intnum5 + 10));
        } else {
            output.set(2, (byte) (intnum5 + 10));
        }
       
        output.set(3, intnum100 + 10);
        output.set(4, intnum + 10);
        output.set(5, (long) (longnum + 10));
        output.set(6, (float) (floatnum + 10));
        output.set(7, (double) (doublenum + 10));
        for(int i = 0; i < 8; i++) {
            System.err.println("XXX: class:" + output.get(i).getClass());
        }
        context.write(new IntWritable(0), output);

    }
View Full Code Here

            // Copy columns 3-6 out of the input record (t, si, i come from
            // earlier lines of this method, above this fragment).
            b = (Long)value.get(3);
            f = (Float)value.get(4);
            d = (Double)value.get(5);
            s = (String)value.get(6);
           
            // Pass all seven columns through unchanged; the MapReduce key is
            // unused by HCatalog output, hence null.
            HCatRecord record = new DefaultHCatRecord(7);
            record.set(0, t);
            record.set(1, si);
            record.set(2, i);
            record.set(3, b);
            record.set(4, f);
            record.set(5, d);
            record.set(6, s);
           
            context.write(null, record);

        }
View Full Code Here

TOP

Related Classes of org.apache.hcatalog.data.HCatRecord

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.