Class org.apache.hadoop.mapreduce.Mapper

Examples of org.apache.hadoop.mapreduce.Mapper.Context
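Mapper.Context is the object the MapReduce framework hands to a Mapper's setup, map, cleanup, and run methods; it exposes the job Configuration, the TaskAttemptID, counters, and the collector for intermediate output. The snippets below either build a real Context (via MapContextImpl and WrappedMapper) or mock one with Mockito for unit tests. For orientation, here is a minimal sketch of ordinary Context usage inside a mapper; WordLengthMapper and the "wordlength.min" key are hypothetical, not taken from the projects quoted below.

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordLengthMapper
        extends Mapper<LongWritable, Text, Text, IntWritable> {

    @Override
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Context gives access to the job Configuration and to the collector
        // for intermediate (key, value) output.
        // "wordlength.min" is an illustrative, made-up configuration key.
        int minLength = context.getConfiguration().getInt("wordlength.min", 1);

        for (String word : value.toString().split("\\s+")) {
            if (word.length() >= minLength) {
                context.write(new Text(word), new IntWritable(word.length()));
            }
        }
    }
}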


    LoadSplit split = getLoadSplit();

    MapContext<NullWritable, GridmixRecord, GridmixKey, GridmixRecord> mapContext = new MapContextImpl<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>(
            conf, taskId, reader, writer, committer, reporter, split);
    // wrap the MapContext so it can be handed to the mapper as a Mapper.Context
    Context ctx = new WrappedMapper<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>()
            .getMapContext(mapContext);

    reader.initialize(split, ctx);
    ctx.getConfiguration().setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
    CompressionEmulationUtil.setCompressionEmulationEnabled(
            ctx.getConfiguration(), true);

    LoadJob.LoadMapper mapper = new LoadJob.LoadMapper();
    // run() drives the full lifecycle: setup(), map() per record, then cleanup()
    mapper.run(ctx);
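The Gridmix snippet above drives LoadJob.LoadMapper through its full lifecycle by calling run() on a hand-built Context: a MapContextImpl wrapped by WrappedMapper.getMapContext. A rough, self-contained sketch of that same pattern follows; MapperContextFactory is a hypothetical helper, the Mockito mocks stand in for the reader, writer, committer and split, and the generic parameters are placeholders rather than the Gridmix types.

import static org.mockito.Mockito.mock;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.StatusReporter;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.lib.map.WrappedMapper;
import org.apache.hadoop.mapreduce.task.MapContextImpl;

public class MapperContextFactory {

    @SuppressWarnings("unchecked")
    public static Mapper<LongWritable, Text, Text, LongWritable>.Context newContext(
            Configuration conf) {
        TaskAttemptID taskId = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);

        // Mocked collaborators: a real test would supply a reader over fixture
        // records and a writer (or mock) whose output it can assert on.
        RecordReader<LongWritable, Text> reader = mock(RecordReader.class);
        RecordWriter<Text, LongWritable> writer = mock(RecordWriter.class);
        OutputCommitter committer = mock(OutputCommitter.class);
        StatusReporter reporter = mock(StatusReporter.class);
        InputSplit split = mock(InputSplit.class);

        MapContext<LongWritable, Text, Text, LongWritable> mapContext =
            new MapContextImpl<LongWritable, Text, Text, LongWritable>(
                conf, taskId, reader, writer, committer, reporter, split);

        // WrappedMapper adapts the MapContext into the Mapper.Context type
        // that Mapper.run(), setup(), map() and cleanup() expect.
        return new WrappedMapper<LongWritable, Text, Text, LongWritable>()
            .getMapContext(mapContext);
    }
}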


    OutputCommitter committer = new CustomOutputCommitter();
    StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter();
    SleepSplit split = getSleepSplit();
    MapContext<LongWritable, LongWritable, GridmixKey, NullWritable> mapcontext = new MapContextImpl<LongWritable, LongWritable, GridmixKey, NullWritable>(
            conf, taskId, reader, writer, committer, reporter, split);
    Context context = new WrappedMapper<LongWritable, LongWritable, GridmixKey, NullWritable>()
            .getMapContext(mapcontext);

    long start = System.currentTimeMillis();
    LOG.info("start:" + start);
    LongWritable key = new LongWritable(start + 2000);

        private static final String FILENAME = "emitter_";
        private DataOutputStream out;

        @Override
        public void preApplication() {
            Context context = getContext();
            FileSystem fs;

            try {
                fs = FileSystem.get(context.getConfiguration());

                String p = context.getConfiguration()
                    .get(SimpleVertexWithWorkerContext.OUTPUTDIR);
                if (p == null) {
                    throw new IllegalArgumentException(
                        SimpleVertexWithWorkerContext.OUTPUTDIR +
                        " undefined!");
                }

                Path path = new Path(p);
                if (!fs.exists(path)) {
                    throw new IllegalArgumentException(path +
                            " doesn't exist");
                }

                Path outF = new Path(path, FILENAME +
                        context.getTaskAttemptID());
                if (fs.exists(outF)) {
                    throw new IllegalArgumentException(outF +
                            " aready exists");
                }

import org.apache.hadoop.mapreduce.Mapper.Context;

public class RPCCommunicationsTest extends TestCase {

    public void testDuplicateRpcPort() throws Exception {
        @SuppressWarnings("rawtypes")
        Context context = mock(Context.class);
        Configuration conf = new Configuration();
        conf.setInt("mapred.task.partition", 9);
        conf.setInt(GiraphJob.MAX_WORKERS, 13);
        when(context.getConfiguration()).thenReturn(conf);
        when(context.getJobID()).thenReturn(new JobID());

        RPCCommunications<IntWritable, IntWritable, IntWritable, IntWritable>
            comm1 =
                new RPCCommunications<
                    IntWritable, IntWritable,

    // Setup the conf
    GiraphConfiguration tmpConf = new GiraphConfiguration();
    GiraphConstants.VERTEX_CLASS.set(tmpConf, TestVertex.class);
    conf = new ImmutableClassesGiraphConfiguration(tmpConf);

    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    // Start the service
    serverData = MockUtils.createNewServerData(conf, context);
    workerInfo = new WorkerInfo();
    server = new NettyServer(conf,

   *
   * @throws IOException
   */
  @Test
  public void connectSingleClientServer() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);
    WorkerInfo workerInfo = new WorkerInfo();
    NettyServer server =

   *
   * @throws IOException
   */
  @Test
  public void connectOneClientToThreeServers() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);
    RequestServerHandler.Factory requestServerHandlerFactory =
        new WorkerRequestServerHandler.Factory(serverData);

   *
   * @throws IOException
   */
  @Test
  public void connectThreeClientsToOneServer() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);
    WorkerInfo workerInfo = new WorkerInfo();
    NettyServer server = new NettyServer(conf,

    /** Output stream to dump the strings. */
    private DataOutputStream out;

    @Override
    public void preApplication() {
      Context context = getContext();
      FileSystem fs;

      try {
        fs = FileSystem.get(context.getConfiguration());

        String p = context.getConfiguration()
            .get(SimpleVertexWithWorkerContext.OUTPUTDIR);
        if (p == null) {
          throw new IllegalArgumentException(
              SimpleVertexWithWorkerContext.OUTPUTDIR +
              " undefined!");
        }

        Path path = new Path(p);
        if (!fs.exists(path)) {
          throw new IllegalArgumentException(path +
              " doesn't exist");
        }

        Path outF = new Path(path, FILENAME +
            context.getTaskAttemptID());
        if (fs.exists(outF)) {
          throw new IllegalArgumentException(outF +
              " aready exists");
        }


   *
   * @throws IOException
   */
  @Test
  public void connectSingleClientServer() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);

    SaslServerHandler.Factory mockedSaslServerFactory =
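Most of the Giraph snippets above simply mock Mapper.Context with Mockito and stub getConfiguration(). The same mocking approach also works for asserting a mapper's output directly, since the mock records every write() call. A sketch of such a test, reusing the hypothetical WordLengthMapper from the top of this page:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.junit.Test;

public class WordLengthMapperTest {

    @Test
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public void emitsOneLengthPerWord() throws Exception {
        Context context = mock(Context.class);
        when(context.getConfiguration()).thenReturn(new Configuration());

        new WordLengthMapper().map(new LongWritable(0L), new Text("ab cde"), context);

        // The mock records each context.write() call, so the mapper's output
        // can be checked without running a real job.
        verify(context).write(new Text("ab"), new IntWritable(2));
        verify(context).write(new Text("cde"), new IntWritable(3));
    }
}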
