Package org.apache.hive.hcatalog.data

Examples of org.apache.hive.hcatalog.data.HCatRecord
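
The snippets on this page all exercise the same small surface: DefaultHCatRecord is the positional implementation of HCatRecord, and fields can also be set or read by name once an HCatSchema is available. As a quick orientation, here is a minimal, self-contained sketch of that API; the field names (id, msg) and the values are illustrative only, not taken from the examples below.

import java.util.Arrays;

import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class HCatRecordSketch {
  public static void main(String[] args) throws Exception {
    // A two-column schema: id int, msg string (illustrative field names).
    HCatSchema schema = new HCatSchema(Arrays.asList(
        new HCatFieldSchema("id", HCatFieldSchema.Type.INT, "record id"),
        new HCatFieldSchema("msg", HCatFieldSchema.Type.STRING, "message text")));

    // DefaultHCatRecord is positional: size it to the schema, then set by index...
    HCatRecord record = new DefaultHCatRecord(schema.size());
    record.set(0, 7);
    record.set(1, "textfield7");

    // ...or address fields by name once a schema is in hand.
    record.set("msg", schema, "textfield7-updated");
    System.out.println(record.get("id", schema) + " / " + record.get("msg", schema));
  }
}

Positional set(int, Object) is what the writer mappers below use when building records; name-based get(String, HCatSchema) is what the validation snippet uses when reading them back.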


    private int i = 0;

    @Override
    protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
      HCatRecord record = null;
      String[] splits = value.toString().split(",");
      switch (i) {
      case 0:
        record = new DefaultHCatRecord(2);
        record.set(0, splits[0]);
        record.set(1, splits[1]);
        break;
      case 1:
        record = new DefaultHCatRecord(1);
        record.set(0, splits[0]);
        break;
      case 2:
        record = new DefaultHCatRecord(3);
        record.set(0, splits[0]);
        record.set(1, splits[1]);
        record.set(2, "extra");
        break;
      default:
        Assert.fail("This should not happen!!!!!");
      }
      MultiOutputFormat.write(tableNames[i], null, record, context);
      // Assumed continuation: advance to the next target table for the following input line.
      i++;
    }


    @Override
    public void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
      try {
        HCatRecord rec = writeRecords.get(writeCount);
        context.write(null, rec);
        writeCount++;
      } catch (Exception e) {
        // Assumed completion: the original excerpt is truncated here; rethrow so
        // the task does not silently swallow the failure.
        throw new IOException(e);
      }
    }
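
The mapper above emits each record with context.write(null, rec); that only reaches an HCatalog table if the job has been configured with HCatOutputFormat. The sketch below shows that wiring in the shape used by the documented HCatalog MapReduce examples. The database and table names are placeholders, and the exact getTableSchema/setSchema overloads have shifted between HCatalog releases, so treat the signatures as assumptions to verify against the version in use.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

public class HCatWriteJobSketch {

  // Wires a job so that a mapper calling context.write(null, rec) lands its
  // HCatRecords in an HCatalog-managed table ("default.demo_table" is a placeholder).
  public static Job configure(Configuration conf) throws Exception {
    Job job = Job.getInstance(conf, "hcat-write-sketch");
    job.setJarByClass(HCatWriteJobSketch.class);
    // job.setMapperClass(...) would point at a writer mapper like the one above.

    // Bind the job to the target table; a null partition map means partition
    // values, if any, come from the records themselves (dynamic partitioning).
    HCatOutputFormat.setOutput(job,
        OutputJobInfo.create("default", "demo_table", null));

    // Declare the output schema; it is normally taken from the table itself.
    HCatOutputFormat.setSchema(job,
        HCatOutputFormat.getTableSchema(job.getConfiguration()));

    job.setOutputFormatClass(HCatOutputFormat.class);
    job.setOutputKeyClass(WritableComparable.class);   // the key is ignored on write
    job.setOutputValueClass(DefaultHCatRecord.class);
    return job;
  }
}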


    Object expectedVal = null;
    Object actualVal = null;
    for (int i = 0; i < recs.size(); ++i) {
      HCatRecord rec = recs.get(i);
      expectedVal = i;
      actualVal = rec.get("id", schema);
      LOG.info("Validating field: id (expected = "
        + expectedVal + ", actual = " + actualVal + ")");
      HCatalogTestUtils.assertEquals(expectedVal, actualVal);
      expectedVal = "textfield" + i;
      actualVal = rec.get("msg", schema);
      LOG.info("Validating field: msg (expected = "
        + expectedVal + ", actual = " + actualVal + ")");
      HCatalogTestUtils.assertEquals(expectedVal, actualVal);
      for (ColumnGenerator col : cols) {
        String name = col.getName().toLowerCase();
        expectedVal = col.getHCatValue(i);
        actualVal = rec.get(name, schema);
        LOG.info("Validating field: " + name + " (expected = "
          + expectedVal + ", actual = " + actualVal + ")");
        HCatalogTestUtils.assertEquals(expectedVal, actualVal);
      }
    }
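
The validation loop above reads fields back out of each HCatRecord by name with rec.get(name, schema). On the read side, records like these arrive in a mapper through HCatInputFormat, as in the minimal sketch below; the database and table names are again placeholders, and the setInput/getTableSchema signatures should likewise be checked against the HCatalog release in use.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

public class HCatReadJobSketch {

  // Receives HCatRecord values from HCatInputFormat and dumps the first two
  // fields of each record as a tab-separated text line (positional access).
  public static class DumpMapper
      extends Mapper<WritableComparable, HCatRecord, NullWritable, Text> {
    @Override
    protected void map(WritableComparable key, HCatRecord value, Context context)
        throws IOException, InterruptedException {
      // Name-based access (value.get("msg", schema)) would need the table schema,
      // typically fetched from HCatInputFormat in setup(); positional access does not.
      context.write(NullWritable.get(), new Text(value.get(0) + "\t" + value.get(1)));
    }
  }

  public static Job configure(Configuration conf) throws Exception {
    Job job = Job.getInstance(conf, "hcat-read-sketch");
    job.setJarByClass(HCatReadJobSketch.class);
    job.setMapperClass(DumpMapper.class);
    job.setNumReduceTasks(0);

    // Read from an HCatalog table; where the dumped text lines go
    // (a plain FileOutputFormat) is omitted here.
    HCatInputFormat.setInput(job, "default", "demo_table");
    job.setInputFormatClass(HCatInputFormat.class);
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);
    return job;
  }
}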

    @Override
    public void map(LongWritable key, Text value,
      Context context)
      throws IOException, InterruptedException {
      try {
        HCatRecord rec = recsToLoad.get(writtenRecordCount);
        context.write(null, rec);
        writtenRecordCount++;
      } catch (Exception e) {
        if (LOG.isDebugEnabled()) {
          e.printStackTrace(System.err);
        }
        // Assumed completion: the original excerpt is truncated here; rethrow
        // so the failure is not silently swallowed.
        throw new IOException(e);
      }
    }

    if (colCount == -1) {
      colCount = sqr.getFieldMap().size();
    }

    Map<String, Object> fieldMap = sqr.getFieldMap();
    // fieldCount is the width of the full HCatalog table schema; the output
    // record needs one slot per HCatalog column.
    HCatRecord result = new DefaultHCatRecord(fieldCount);

    for (Map.Entry<String, Object> entry : fieldMap.entrySet()) {
      String key = entry.getKey();
      Object val = entry.getValue();
      String hfn = key.toLowerCase();
      boolean skip = false;
      // Static partition key values are supplied through the output job spec,
      // not stored in the record itself, so skip those columns here.
      if (staticPartitionKeys != null && staticPartitionKeys.length > 0) {
        for (int i = 0; i < staticPartitionKeys.length; ++i) {
          if (staticPartitionKeys[i].equals(hfn)) {
            skip = true;
            break;
          }
        }
      }
      if (skip) {
        continue;
      }
      HCatFieldSchema hfs = hCatFullTableSchema.get(hfn);
      if (debugHCatImportMapper) {
        LOG.debug("SqoopRecordVal: field = " + key + " Val " + val
          + " of type " + (val == null ? null : val.getClass().getName())
          + ", hcattype " + hfs.getTypeString());
      }
      Object hCatVal = toHCat(val, hfs);

      result.set(hfn, hCatFullTableSchema, hCatVal);
    }

    return result;
  }

      currentKey = baseRecordReader.createKey();
      currentValue = baseRecordReader.createValue();
    }

    while (baseRecordReader.next(currentKey, currentValue)) {
      HCatRecord r = null;
      Throwable t = null;

      errorTracker.incRecords();

      try {
        Object o = deserializer.deserialize(currentValue);
        r = new LazyHCatRecord(o, deserializer.getObjectInspector());
      } catch (Throwable throwable) {
        t = throwable;
      }

      if (r == null) {
        errorTracker.incErrors(t);
        continue;
      }

      // Project the row onto the output schema: copy fields present in the data
      // schema and fill the rest (e.g. partition columns) from valuesNotInDataCols.
      DefaultHCatRecord dr = new DefaultHCatRecord(outputSchema.size());
      int i = 0;
      for (String fieldName : outputSchema.getFieldNames()) {
        if (dataSchema.getPosition(fieldName) != null) {
          dr.set(i, r.get(fieldName, dataSchema));
        } else {
          dr.set(i, valuesNotInDataCols.get(fieldName));
        }
        i++;
      }

    try {
      committer = outFormat.getOutputCommitter(cntxt);
      committer.setupTask(cntxt);
      writer = outFormat.getRecordWriter(cntxt);
      while (recordItr.hasNext()) {
        HCatRecord rec = recordItr.next();
        writer.write(null, rec);
      }
      writer.close(cntxt);
      if (committer.needsTaskCommit(cntxt)) {
        committer.commitTask(cntxt);
      }
    } catch (Exception e) {
      // Assumed completion: the original excerpt ends inside the try block.
      throw new IOException(e);
    }
