Package org.apache.hcatalog.data

Examples of org.apache.hcatalog.data.DefaultHCatRecord
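
DefaultHCatRecord is the standard List-backed implementation of HCatRecord: it wraps a List<Object> of field values, supports positional get/set as well as name-based access against an HCatSchema, and implements Writable so records can be serialized by MapReduce. A minimal construction sketch (the values and two-column layout here are illustrative only, not taken from the examples below):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hcatalog.data.DefaultHCatRecord;
    import org.apache.hcatalog.data.HCatRecord;

    // Build a two-column record (int, string) from a positional value list.
    List<Object> fields = new ArrayList<Object>(2);
    fields.add(42);
    fields.add("hello");
    HCatRecord record = new DefaultHCatRecord(fields);

The excerpts below appear to be drawn from HCatalog's own test suite; inline comments note what each one exercises.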


    // Build 20 two-column records: an int key and a string value.
    for (int i = 0; i < 20; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("strvalue" + i);
      writeRecords.add(new DefaultHCatRecord(objList));
    }

    // Declare the write schema: c1 int, c2 string.
    partitionColumns = new ArrayList<HCatFieldSchema>();
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")));
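The next, longer excerpt evidently comes from a partitioned-table test: it writes records carrying an extra column c3, verifies the resulting schema evolution, and then checks the error types raised for incompatible writes.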


    // Assuming the same 20-iteration loop as the first excerpt, now with a third column c3.
    for (int i = 0; i < 20; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("strvalue" + i);
      objList.add("str2value" + i);

      writeRecords.add(new DefaultHCatRecord(objList));
    }

    Map<String, String> partitionMap = new HashMap<String, String>();
    partitionMap.put("part1", "p1value5");
    partitionMap.put("part0", "p0value5");

    runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);

    tableSchema = getTableSchema();

    // assert that c3 was added to the table schema
    assertEquals(5, tableSchema.getFields().size());
    assertEquals("c1", tableSchema.getFields().get(0).getName());
    assertEquals("c2", tableSchema.getFields().get(1).getName());
    assertEquals("c3", tableSchema.getFields().get(2).getName());
    assertEquals("part1", tableSchema.getFields().get(3).getName());
    assertEquals("part0", tableSchema.getFields().get(4).getName());

    // Test that changing a column's data type fails.
    partitionMap.clear();
    partitionMap.put("part1", "p1value6");
    partitionMap.put("part0", "p0value6");

    partitionColumns = new ArrayList<HCatFieldSchema>();
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.INT_TYPE_NAME, ""))); // c2 was declared string above

    IOException exc = null;
    try {
      runMRCreate(partitionMap, partitionColumns, writeRecords, 20, true);
    } catch (IOException e) {
      exc = e;
    }

    assertTrue(exc != null);
    assertTrue(exc instanceof HCatException);
    assertEquals(ErrorType.ERROR_SCHEMA_TYPE_MISMATCH, ((HCatException) exc).getErrorType());

    // Test that a partition key column is not allowed in the data.
    partitionColumns = new ArrayList<HCatFieldSchema>();
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", Constants.STRING_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("part1", Constants.STRING_TYPE_NAME, ""))); // part1 is a partition key

    List<HCatRecord> recordsContainingPartitionCols = new ArrayList<HCatRecord>(20);
    for (int i = 0; i < 20; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("c2value" + i);
      objList.add("c3value" + i);
      objList.add("p1value6");

      recordsContainingPartitionCols.add(new DefaultHCatRecord(objList));
    }

    exc = null;
    try {
      runMRCreate(partitionMap, partitionColumns, recordsContainingPartitionCols, 20, true);
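A further excerpt from the same family of tests: records whose layout does not match the declared columns are rejected, while writing only a subset of the table's columns succeeds.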

    // Assuming a record-building loop like the earlier excerpts (the write below passes 10).
    for (int i = 0; i < 10; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("co strvalue" + i);
      objList.add("co str2value" + i);

      writeRecords.add(new DefaultHCatRecord(objList));
    }

    Map<String, String> partitionMap = new HashMap<String, String>();
    partitionMap.put("part1", "p1value8");
    partitionMap.put("part0", "p0value8");

    // Records that do not match the declared columns should be rejected.
    Exception exc = null;
    try {
      runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);
    } catch (IOException e) {
      exc = e;
    }

    assertTrue(exc != null);
    assertTrue(exc instanceof HCatException);
    assertEquals(ErrorType.ERROR_SCHEMA_COLUMN_MISMATCH, ((HCatException) exc).getErrorType());


    // Writing with fewer columns than the table schema succeeds; the evolved
    // column c3 is simply absent from these records.
    partitionColumns = new ArrayList<HCatFieldSchema>();
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", Constants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", Constants.STRING_TYPE_NAME, "")));

    writeRecords = new ArrayList<HCatRecord>();

    for (int i = 0; i < 10; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("co strvalue" + i);

      writeRecords.add(new DefaultHCatRecord(objList));
    }

    runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);

    // The subsequent read should see 10 + 20 + 10 + 10 + 20 rows.
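This loop body builds records whose third column is a computed partition value; mod and offset are parameters of the enclosing helper.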

      // Inside a helper for a dynamic-partitioning test: the third column,
      // (i % mod) + offset, selects the target partition for each record.
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("strvalue" + i);
      objList.add(String.valueOf((i % mod) + offset));
      writeRecords.add(new DefaultHCatRecord(objList));
    }
  }
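A mapper that fills a DefaultHCatRecord by column name, using the output schema that HCatOutputFormat stored in the job configuration: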

    public static class MapHCatWrite extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Recover the OutputJobInfo that HCatOutputFormat serialized into the configuration.
            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(
                    context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            // Input lines look like "key,column:value,column:value".
            String[] vals = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            for (int i = 1; i < vals.length; i++) {
                String[] pair = vals[i].split(":");
                record.set(pair[0], schema, pair[1]);
            }
            context.write(null, record);
        }
    }
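For context, the mapper above relies on HCatOutputFormat having serialized an OutputJobInfo into the configuration under HCatConstants.HCAT_KEY_OUTPUT_INFO. A minimal, hypothetical driver sketch against the 0.4-era API ("default" and "mytable" are placeholder names, and conf is assumed to be a Hive-site-aware Configuration):

    // Sketch only, not from the original test: wires MapHCatWrite to HCatOutputFormat.
    Job job = new Job(conf, "hcat write example");
    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", "mytable", null));
    // Declare that the job writes the table's full schema.
    HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));
    job.setJarByClass(MapHCatWrite.class);
    job.setMapperClass(MapHCatWrite.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(HCatOutputFormat.class);
    job.setOutputKeyClass(BytesWritable.class);
    job.setOutputValueClass(DefaultHCatRecord.class);
    job.setNumReduceTasks(0);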

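A variant of the same mapper that fails deliberately on one input row, used to check that the output transaction is aborted cleanly: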

    static class MapWriteAbortTransaction extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(
                    context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            String[] vals = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            // Fail on one input row so the test can verify that the output
            // transaction is aborted and no partial data becomes visible.
            if (vals[0].equals("3")) {
                throw new IOException("Failing map to test abort");
            }
            for (int i = 1; i < vals.length; i++) {
                String[] pair = vals[i].split(":");
                record.set(pair[0], schema, pair[1]);
            }
            context.write(null, record);
        }
    }
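Reading serialized records back through the Writable interface and comparing them with the originals: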

    InputStream fInStream = new FileInputStream(f);
    DataInput inpStream = new DataInputStream(fInStream);

    // Deserialize each record via Writable.readFields and compare with the original.
    for (int i = 0; i < recs.length; i++) {
      HCatRecord rec = new DefaultHCatRecord();
      rec.readFields(inpStream);
      Assert.assertTrue(HCatDataCheckUtil.recordsEqual(recs[i], rec));
    }

    // The stream should be fully consumed.
    Assert.assertEquals(0, fInStream.available());
    fInStream.close();
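
The write half of this round trip is the same Writable contract in reverse; a minimal sketch, assuming the same recs array and target file f:

    // Serialize each record with Writable.write, mirroring the readFields loop above.
    OutputStream fOutStream = new FileOutputStream(f);
    DataOutputStream outStream = new DataOutputStream(fOutStream);
    for (HCatRecord rec : recs) {
      rec.write(outStream);
    }
    outStream.close();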
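A factory method that builds four records covering the scalar types plus nested list and map fields: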

    // Assuming rec_1 begins like rec_2 below: byte, short, int, long, then these values.
    List<Object> rec_1 = new ArrayList<Object>(8);
    rec_1.add(new Byte("123"));
    rec_1.add(new Short("456"));
    rec_1.add(new Integer(789));
    rec_1.add(new Long(1000L));
    rec_1.add(new Double(5.3D));
    rec_1.add(new String("hcat and hadoop"));
    rec_1.add(null);
    rec_1.add("null"); // the string "null", distinct from a null field

    HCatRecord tup_1 = new DefaultHCatRecord(rec_1);

    List<Object> rec_2 = new ArrayList<Object>(8);
    rec_2.add(new Byte("123"));
    rec_2.add(new Short("456"));
    rec_2.add(new Integer(789));
    rec_2.add(new Long(1000L));
    rec_2.add(new Double(5.3D));
    rec_2.add(new String("hcat and hadoop"));
    rec_2.add(null);
    rec_2.add("null");
    HCatRecord tup_2 = new DefaultHCatRecord(rec_2);

    List<Object> rec_3 = new ArrayList<Object>(10);
    rec_3.add(new Byte("123"));
    rec_3.add(new Short("456"));
    rec_3.add(new Integer(789));
    rec_3.add(new Long(1000L));
    rec_3.add(new Double(5.3D));
    rec_3.add(new String("hcat and hadoop"));
    rec_3.add(null);
    List<Integer> innerList = new ArrayList<Integer>();
    innerList.add(314);
    innerList.add(007); // octal literal, i.e. decimal 7
    rec_3.add(innerList); // a list-typed field
    Map<Short, String> map = new HashMap<Short, String>(3);
    map.put(new Short("2"), "hcat is cool");
    map.put(new Short("3"), "is it?");
    map.put(new Short("4"), "or is it not?");
    rec_3.add(map); // a map-typed field

    HCatRecord tup_3 = new DefaultHCatRecord(rec_3);

    List<Object> rec_4 = new ArrayList<Object>(8);
    rec_4.add(new Byte("123"));
    rec_4.add(new Short("456"));
    rec_4.add(new Integer(789));
    rec_4.add(new Long(1000L));
    rec_4.add(new Double(5.3D));
    rec_4.add(new String("hcat and hadoop"));
    rec_4.add(null);
    rec_4.add("null");

    Map<Short, String> map2 = new HashMap<Short, String>(3);
    map2.put(new Short("2"), "hcat is cool");
    map2.put(new Short("3"), "is it?");
    map2.put(new Short("4"), "or is it not?");
    rec_4.add(map2);
    List<Integer> innerList2 = new ArrayList<Integer>();
    innerList2.add(314);
    innerList2.add(007);
    rec_4.add(innerList2);
    HCatRecord tup_4 = new DefaultHCatRecord(rec_4);

    return new HCatRecord[]{tup_1, tup_2, tup_3, tup_4};

  }
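Finally, a mapper that fills a record purely by position: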

  public static class Map extends Mapper<LongWritable, Text, NullWritable, DefaultHCatRecord> {

      @Override
      public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
          // Split a comma-separated line into a three-column record, set by position.
          String[] cols = value.toString().split(",");
          DefaultHCatRecord record = new DefaultHCatRecord(3);
          record.set(0, Integer.parseInt(cols[0]));
          record.set(1, cols[1]);
          record.set(2, cols[2]);
          context.write(NullWritable.get(), record);
      }
  }
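Note that the output key is immaterial in these mappers: HCatOutputFormat ignores it, which is why the examples write either null or NullWritable.get() as the key.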
