Package org.apache.hcatalog.data

Examples of org.apache.hcatalog.data.DefaultHCatRecord
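
Before the extracted snippets, here is a minimal, self-contained sketch of the two DefaultHCatRecord constructors those snippets lean on: wrapping an existing List<Object>, and allocating a fixed number of slots that are then filled by position. The class name and column values below are illustrative only, not taken from any of the examples.

import java.util.ArrayList;
import java.util.List;

import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;

public class DefaultHCatRecordSketch {
    public static void main(String[] args) {
        // 1) Wrap an existing list: the record exposes the list contents by position.
        List<Object> objList = new ArrayList<Object>();
        objList.add(0);
        objList.add("strvalue0");
        objList.add("str2value0");
        HCatRecord fromList = new DefaultHCatRecord(objList);

        // 2) Allocate a fixed number of (initially null) slots and set them by index.
        HCatRecord bySize = new DefaultHCatRecord(3);
        bySize.set(0, 0);
        bySize.set(1, "strvalue0");
        bySize.set(2, "str2value0");

        System.out.println(fromList.get(1)); // strvalue0
        System.out.println(bySize.size());   // 3
    }
}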


            objList.add(i);
            objList.add("strvalue" + i);
            objList.add("str2value" + i);

            writeRecords.add(new DefaultHCatRecord(objList));
        }

        Map<String, String> partitionMap = new HashMap<String, String>();
        partitionMap.put("part1", "p1value5");
        partitionMap.put("part0", "p0value5");

        runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);

        tableSchema = getTableSchema();

        //assert that c3 has been added to the table schema
        assertEquals(5, tableSchema.getFields().size());
        assertEquals("c1", tableSchema.getFields().get(0).getName());
        assertEquals("c2", tableSchema.getFields().get(1).getName());
        assertEquals("c3", tableSchema.getFields().get(2).getName());
        assertEquals("part1", tableSchema.getFields().get(3).getName());
        assertEquals("part0", tableSchema.getFields().get(4).getName());

        //Test that changing column data type fails
        partitionMap.clear();
        partitionMap.put("part1", "p1value6");
        partitionMap.put("part0", "p0value6");

        partitionColumns = new ArrayList<HCatFieldSchema>();
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.INT_TYPE_NAME, "")));

        IOException exc = null;
        try {
            runMRCreate(partitionMap, partitionColumns, writeRecords, 20, true);
        } catch (IOException e) {
            exc = e;
        }

        assertTrue(exc != null);
        assertTrue(exc instanceof HCatException);
        assertEquals(ErrorType.ERROR_SCHEMA_TYPE_MISMATCH, ((HCatException) exc).getErrorType());

        //Test that partition key is not allowed in data
        partitionColumns = new ArrayList<HCatFieldSchema>();
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, "")));
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("part1", serdeConstants.STRING_TYPE_NAME, "")));

        List<HCatRecord> recordsContainingPartitionCols = new ArrayList<HCatRecord>(20);
        for (int i = 0; i < 20; i++) {
            List<Object> objList = new ArrayList<Object>();

            objList.add(i);
            objList.add("c2value" + i);
            objList.add("c3value" + i);
            objList.add("p1value6");

            recordsContainingPartitionCols.add(new DefaultHCatRecord(objList));
        }

        exc = null;
        try {
            runMRCreate(partitionMap, partitionColumns, recordsContainingPartitionCols, 20, true);
View Full Code Here
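
The partition column definitions in the test above are produced by converting Hive metastore FieldSchema objects into HCatFieldSchema via HCatSchemaUtils. Below is a minimal stand-alone sketch of just that conversion, using the same column names; the test harness (runMRCreate, getTableSchema) is not reproduced, and the imports assume the Hive serdeConstants class these examples already reference.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchemaUtils;

public class FieldSchemaConversionSketch {
    public static void main(String[] args) throws Exception {
        // Describe the columns as Hive FieldSchema (name, type name, comment),
        // then convert each one into the HCatFieldSchema form used above.
        List<HCatFieldSchema> partitionColumns = new ArrayList<HCatFieldSchema>();
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(
            new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(
            new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));

        for (HCatFieldSchema column : partitionColumns) {
            System.out.println(column.getName() + " -> " + column);
        }
    }
}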


            objList.add(i);
            objList.add("co strvalue" + i);
            objList.add("co str2value" + i);

            writeRecords.add(new DefaultHCatRecord(objList));
        }

        Map<String, String> partitionMap = new HashMap<String, String>();
        partitionMap.put("part1", "p1value8");
        partitionMap.put("part0", "p0value8");

        Exception exc = null;
        try {
            runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);
        } catch (IOException e) {
            exc = e;
        }

        assertTrue(exc != null);
        assertTrue(exc instanceof HCatException);
        assertEquals(ErrorType.ERROR_SCHEMA_COLUMN_MISMATCH, ((HCatException) exc).getErrorType());


        partitionColumns = new ArrayList<HCatFieldSchema>();
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
        partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));

        writeRecords = new ArrayList<HCatRecord>();

        for (int i = 0; i < 10; i++) {
            List<Object> objList = new ArrayList<Object>();

            objList.add(i);
            objList.add("co strvalue" + i);

            writeRecords.add(new DefaultHCatRecord(objList));
        }

        runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);

        //Read should get 10 + 20 + 10 + 10 + 20 rows
View Full Code Here

        // Convert each Pig tuple field to its HCatalog Java representation, walking
        // the fields in the order given by the computed output schema.
        int i = 0;
        for (HCatFieldSchema fSchema : computedSchema.getFields()) {
            outgoing.add(getJavaObj(tuple.get(i++), fSchema));
        }
        try {
            writer.write(null, new DefaultHCatRecord(outgoing));
        } catch (InterruptedException e) {
            throw new BackendException("Error while writing tuple: " + tuple, PigHCatUtil.PIG_EXCEPTION_CODE, e);
        }
    }
View Full Code Here

            if (r == null) {
                errorTracker.incErrors(t);
                continue;
            }

            // Build the output record: fields present in the data schema are copied from
            // the source record; the remaining fields come from the static values map.
            DefaultHCatRecord dr = new DefaultHCatRecord(outputSchema.size());
            int i = 0;
            for (String fieldName : outputSchema.getFieldNames()) {
                if (dataSchema.getPosition(fieldName) != null) {
                    dr.set(i, r.get(fieldName, dataSchema));
                } else {
                    dr.set(i, valuesNotInDataCols.get(fieldName));
                }
                i++;
            }

            currentHCatRecord = dr;
View Full Code Here
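
The loop above is a common pattern: build a DefaultHCatRecord for the output schema, copying fields that exist in the data schema from the source record and filling the remainder (typically partition values) from a constant map. Here is a self-contained sketch of that merge with hypothetical two-column data and one constant column; the schemas are hand-built rather than coming from the surrounding writer.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchema;

public class MergeConstantColumnsSketch {
    public static void main(String[] args) throws Exception {
        // The schema the incoming record actually carries.
        List<HCatFieldSchema> dataCols = new ArrayList<HCatFieldSchema>();
        dataCols.add(new HCatFieldSchema("c1", HCatFieldSchema.Type.INT, null));
        dataCols.add(new HCatFieldSchema("c2", HCatFieldSchema.Type.STRING, null));
        HCatSchema dataSchema = new HCatSchema(dataCols);

        // The output schema: the data columns plus one column supplied as a constant.
        List<HCatFieldSchema> outCols = new ArrayList<HCatFieldSchema>(dataCols);
        outCols.add(new HCatFieldSchema("part1", HCatFieldSchema.Type.STRING, null));
        HCatSchema outputSchema = new HCatSchema(outCols);

        Map<String, Object> valuesNotInDataCols = new HashMap<String, Object>();
        valuesNotInDataCols.put("part1", "p1value1");

        // An incoming record laid out according to dataSchema.
        List<Object> src = new ArrayList<Object>();
        src.add(1);
        src.add("strvalue1");
        HCatRecord r = new DefaultHCatRecord(src);

        // The same merge loop as in the snippet above.
        DefaultHCatRecord dr = new DefaultHCatRecord(outputSchema.size());
        int i = 0;
        for (String fieldName : outputSchema.getFieldNames()) {
            if (dataSchema.getPosition(fieldName) != null) {
                dr.set(i, r.get(fieldName, dataSchema));
            } else {
                dr.set(i, valuesNotInDataCols.get(fieldName));
            }
            i++;
        }
        System.out.println(dr); // the three merged values: 1, strvalue1, p1value1
    }
}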

            throws IOException, InterruptedException {
            HCatRecord record = null;
            String[] splits = value.toString().split(",");
            // Emit records of different widths (2, 1, or 3 columns) depending on the case.
            switch (i) {
            case 0:
                record = new DefaultHCatRecord(2);
                record.set(0, splits[0]);
                record.set(1, splits[1]);
                break;
            case 1:
                record = new DefaultHCatRecord(1);
                record.set(0, splits[0]);
                break;
            case 2:
                record = new DefaultHCatRecord(3);
                record.set(0, splits[0]);
                record.set(1, splits[1]);
                record.set(2, "extra");
                break;
            default:
View Full Code Here

    public static class MapHCatWrite extends Mapper<LongWritable, Text, BytesWritable, HCatRecord> {
        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Recover the OutputJobInfo that was serialized into the job configuration.
            OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(context.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
            HCatRecord record = new DefaultHCatRecord(3);
            HCatSchema schema = jobInfo.getOutputSchema();
            // Input lines look like "<key>,<col>:<value>,..."; fields are set by name against the output schema.
            String[] vals = value.toString().split(",");
            record.setInteger("key", schema, Integer.parseInt(vals[0]));
            for (int i = 1; i < vals.length; i++) {
                String[] pair = vals[i].split(":");
                record.set(pair[0], schema, pair[1]);
            }
            context.write(null, record);
        }
View Full Code Here
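
The mapper above sets record fields by name against the job's output HCatSchema. The following stand-alone sketch shows that named-access pattern on its own; the hand-built three-column schema and its field names are illustrative stand-ins for jobInfo.getOutputSchema().

import java.util.ArrayList;
import java.util.List;

import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchema;

public class NamedFieldAccessSketch {
    public static void main(String[] args) throws Exception {
        // A three-column schema standing in for jobInfo.getOutputSchema().
        List<HCatFieldSchema> columns = new ArrayList<HCatFieldSchema>();
        columns.add(new HCatFieldSchema("key", HCatFieldSchema.Type.INT, null));
        columns.add(new HCatFieldSchema("name", HCatFieldSchema.Type.STRING, null));
        columns.add(new HCatFieldSchema("value", HCatFieldSchema.Type.STRING, null));
        HCatSchema schema = new HCatSchema(columns);

        // One slot per schema column, filled by field name rather than by position.
        HCatRecord record = new DefaultHCatRecord(schema.size());
        record.setInteger("key", schema, 7);
        record.set("name", schema, "alpha");
        record.set("value", schema, "beta");

        System.out.println(record.get("name", schema)); // alpha
    }
}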

            intnum100 = (Integer) value.get(3);
            intnum = (Integer) value.get(4);
            longnum = (Long) value.get(5);
            floatnum = (Float) value.get(6);
            doublenum = (Double) value.get(7);
            // Write every column back with 10 added; the branches below decide whether
            // the small integer columns are emitted as int or as short/byte.
            HCatRecord output = new DefaultHCatRecord(8);
            output.set(0, intnum1000 + 10);
            if (isnoPartPig) {
                output.set(1, ((int) (id + 10)));
            } else {
                output.set(1, ((short) (id + 10)));
            }
            if (isnoPartPig) {
                output.set(2, (int) (intnum5 + 10));
            } else {
                output.set(2, (byte) (intnum5 + 10));
            }

            output.set(3, intnum100 + 10);
            output.set(4, intnum + 10);
            output.set(5, (long) (longnum + 10));
            output.set(6, (float) (floatnum + 10));
            output.set(7, (double) (doublenum + 10));
            for (int i = 0; i < 8; i++) {
                System.err.println("XXX: class:" + output.get(i).getClass());
            }
            context.write(new IntWritable(0), output);

        }
View Full Code Here

            b = (Long) value.get(3);
            f = (Float) value.get(4);
            d = (Double) value.get(5);
            s = (String) value.get(6);

            // Pass the seven typed values straight through into a new record, in column order.
            HCatRecord record = new DefaultHCatRecord(7);
            record.set(0, t);
            record.set(1, si);
            record.set(2, i);
            record.set(3, b);
            record.set(4, f);
            record.set(5, d);
            record.set(6, s);

            context.write(null, record);

        }
View Full Code Here

                count++;
                sum += Double.parseDouble(gpa.toString());
            }

            // Emit a two-column record: the name and the accumulated gpa total (written as a string).
            HCatRecord record = new DefaultHCatRecord(2);
            record.set(0, name);
            record.set(1, Double.toString(sum));

            context.write(null, record);
        }
View Full Code Here

