Package org.apache.hadoop.mapred

Examples of org.apache.hadoop.mapred.Reporter
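
Reporter is the old ("mapred") API's callback through which map and reduce code reports progress, status text, and counters; in tests it is usually replaced by a Mockito mock or by the built-in no-op instance Reporter.NULL. The minimal sketch below (not taken from the snippets on this page; the mapper class and the counter group/name are made up for illustration) shows the three common calls:

  // Hypothetical mapper, shown only to illustrate the Reporter calls.
  // Assumed imports: java.io.IOException, org.apache.hadoop.io.*, org.apache.hadoop.mapred.*
  public static class LineLengthMapper extends MapReduceBase
      implements Mapper<LongWritable, Text, Text, IntWritable> {
    @Override
    public void map(LongWritable key, Text value,
        OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
      reporter.setStatus("mapping offset " + key);       // free-form status line shown in the task UI
      reporter.incrCounter("example", "LINES_SEEN", 1);  // custom counter (made-up group/name)
      output.collect(value, new IntWritable(value.getLength()));
      reporter.progress();                               // heartbeat so the task is not timed out
    }
  }

In a unit test such a mapper can be driven directly with a mocked OutputCollector and Reporter.NULL, which is the pattern most of the snippets below follow.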


  @SuppressWarnings({ "deprecation", "unchecked" })
  public void shouldCreateNewKeyAlthoughExtraKey() throws Exception {
    GroupingTableMap gTableMap = null;
    try {
      Result result = mock(Result.class);
      Reporter reporter = mock(Reporter.class);
      gTableMap = new GroupingTableMap();
      Configuration cfg = new Configuration();
      cfg.set(GroupingTableMap.GROUP_COLUMNS, "familyA:qualifierA familyB:qualifierB");
      JobConf jobConf = new JobConf(cfg);
      gTableMap.configure(jobConf);
View Full Code Here
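
The snippet above stops at the configuration step. A plausible continuation, shown here only as a sketch (it assumes an HBase version where GroupingTableMap reads Result.listCells(); the row and cell values are made up), stubs the Result with the two configured grouping columns plus one extra column, drives map(), and checks that exactly one grouped key/value pair is emitted before closing the map task:

      // Assumed imports: java.util.Arrays, org.apache.hadoop.hbase.Cell, org.apache.hadoop.hbase.KeyValue
      byte[] row = Bytes.toBytes("row");
      when(result.listCells()).thenReturn(Arrays.<Cell>asList(
          new KeyValue(row, Bytes.toBytes("familyA"), Bytes.toBytes("qualifierA"), Bytes.toBytes("1111")),
          new KeyValue(row, Bytes.toBytes("familyB"), Bytes.toBytes("qualifierB"), Bytes.toBytes("2222")),
          // the "extra" column that should not prevent the grouped key from being created
          new KeyValue(row, Bytes.toBytes("familyC"), Bytes.toBytes("qualifierC"), Bytes.toBytes("3333"))));
      OutputCollector<ImmutableBytesWritable, Result> outputCollector = mock(OutputCollector.class);
      gTableMap.map(null, result, outputCollector, reporter);
      verify(outputCollector).collect(any(ImmutableBytesWritable.class), any(Result.class));
    } finally {
      if (gTableMap != null)
        gTableMap.close();
    }
  }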


  @SuppressWarnings({ "deprecation" })
  public void shouldCreateNewKey() throws Exception {
    GroupingTableMap gTableMap = null;
    try {
      Result result = mock(Result.class);
      Reporter reporter = mock(Reporter.class);
      final byte[] bSeparator = Bytes.toBytes(" ");
      gTableMap = new GroupingTableMap();
      Configuration cfg = new Configuration();
      cfg.set(GroupingTableMap.GROUP_COLUMNS, "familyA:qualifierA familyB:qualifierB");
      JobConf jobConf = new JobConf(cfg);
View Full Code Here

      InputSplit split = splits[i];
      Assert.assertTrue(split instanceof TableSnapshotInputFormat.TableSnapshotRegionSplit);

      // validate record reader
      OutputCollector collector = mock(OutputCollector.class);
      Reporter reporter = mock(Reporter.class);
      RecordReader<ImmutableBytesWritable, Result> rr = tsif.getRecordReader(split, job, reporter);

      // validate we can read all the data back
      ImmutableBytesWritable key = rr.createKey();
      Result value = rr.createValue();
View Full Code Here
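
A typical continuation for the record-reader check, shown as a sketch (the assertion messages are made up), drains the old-API reader with next(key, value), which refills the two objects created above, and closes it afterwards:

      int rows = 0;
      while (rr.next(key, value)) {
        Assert.assertTrue("row key should be populated", key.getLength() > 0);
        Assert.assertFalse("row should contain cells", value.isEmpty());
        rows++;
      }
      rr.close();
      Assert.assertTrue("expected at least one row in the snapshot region split", rows > 0);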

                  getValueClass(),
                  allocateRawComparator());
           
            try {
              LOG.info("Execute Local for Query:" + getQueryId() + " Running Merger");
              merger.mergeAndSpill(new Reporter() {
               
                @Override
                public void progress() {
                }
               
View Full Code Here
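
The anonymous Reporter above appears to exist only to satisfy the mergeAndSpill() signature. When nothing has to be recorded at all, Hadoop already ships a complete no-op implementation, so the hand-rolled anonymous class can be avoided; a sketch of that alternative (not a change to the code above):

              // Reporter.NULL is the built-in no-op Reporter: every call below does nothing.
              Reporter noOp = Reporter.NULL;
              noOp.progress();
              noOp.setStatus("ignored");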

                        Properties tableProperties,
                        Progressable progress) throws IOException {
    final RecordWriter result =
      super.getHiveRecordWriter(jc, outPath, valueClass, isCompressed,
        tableProperties, progress);
    final Reporter reporter = (Reporter) progress;
    reporter.setStatus("got here");
    System.out.println("Got a reporter " + reporter);
    return new RecordWriter() {
      @Override
      public void write(Writable w) throws IOException {
        if (w instanceof Text) {
View Full Code Here
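
The wrapper above is truncated inside write(). As a hedged sketch of what such a wrapper commonly does with the captured Reporter (the counter group and name are made up), a per-record helper could look like this:

    // Hypothetical helper: publish one counter tick and a progress heartbeat
    // per record through the Reporter recovered from the Progressable above.
    static void reportRecord(Reporter reporter) {
      reporter.incrCounter("example", "ROWS_WRITTEN", 1L);  // made-up counter group/name
      reporter.progress();
    }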

    JobConf job = new JobConf(conf);
    FileSystem fs = FileSystem.getNamed("local", conf);
    Path dir = new Path(System.getProperty("test.build.data",".") + "/mapred");
    Path file = new Path(dir, "test.seq");
   
    Reporter reporter = new Reporter() {
        public void setStatus(String status) throws IOException {}
        public void progress() throws IOException {}
      };
   
    int seed = new Random().nextInt();
View Full Code Here

  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  }

  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Reporter reporter = context.getReporter();
    Counters.Counter counter = reporter.getCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS");
    result.set(counter.getValue());
    return result;
  }
View Full Code Here
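
Reading a counter through the Reporter only works when the UDF really runs inside a MapReduce task; in local execution, or when the counter has not been published, the lookup can come back null (Reporter.NULL, for instance, returns null from getCounter). A guarded variant of the lookup above, shown as a hypothetical helper, avoids a NullPointerException:

  // Hypothetical helper (sketch): fall back to 0 when no reporter or no such counter is available.
  private long readCounterOrZero(Reporter reporter, String group, String name) {
    if (reporter == null) {
      return 0L;
    }
    Counters.Counter counter = reporter.getCounter(group, name);
    return counter == null ? 0L : counter.getValue();
  }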

      conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
      initStdOut(conf);
      conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
      CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
              new Counters.Counter(), new Progress());
      Reporter reporter = new TestTaskReporter();
      List<Text> texts = new ArrayList<Text>();
      texts.add(new Text("first"));
      texts.add(new Text("second"));
      texts.add(new Text("third"));
View Full Code Here

  public void testFormat() throws IOException {

    PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
    JobConf conf = new JobConf();

    Reporter reporter = mock(Reporter.class);
    RecordReader<FloatWritable, NullWritable> reader = inputFormat
        .getRecordReader(new FakeSplit(), conf, reporter);
    assertEquals(0.0f, reader.getProgress(), 0.001);

    // input and output files
View Full Code Here

    JobConf job = new JobConf(conf);
    FileSystem fs = FileSystem.getNamed("local", conf);
    Path dir = new Path(System.getProperty("test.build.data",".") + "/mapred");
    Path file = new Path(dir, "test.seq");
   
    Reporter reporter = new Reporter() {
        public void setStatus(String status) throws IOException {}
        public void progress() throws IOException {}
      };
   
    int seed = new Random().nextInt();
    //LOG.info("seed = "+seed);
View Full Code Here


