Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.Job

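As a baseline for the snippets below, here is a minimal, self-contained sketch of the usual Job lifecycle using only the standard org.apache.hadoop.mapreduce API. The class name, pass-through mapper, and path arguments are placeholders chosen for illustration; they are not taken from any of the projects quoted on this page.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class JobExample {

        // Pass-through mapper: the base Mapper implementation emits each input record unchanged.
        public static class PassMapper extends Mapper<LongWritable, Text, LongWritable, Text> { }

        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Job.getInstance(...) is the factory method that replaces the deprecated Job constructors.
            Job job = Job.getInstance(conf, "example-job");
            job.setJarByClass(JobExample.class);
            job.setMapperClass(PassMapper.class);
            job.setNumReduceTasks(0);              // map-only job: mapper output goes straight to the output format
            job.setOutputKeyClass(LongWritable.class);
            job.setOutputValueClass(Text.class);
            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            // Submit and block until completion, printing progress; the exit code reflects success.
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }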

    static List<StageInputSplit> computeSplits(JobContext context) throws IOException, InterruptedException {
        assert context != null;
        Map<FormatAndMapper, List<StageInput>> paths = getPaths(context);
        Map<Class<? extends InputFormat<?, ?>>, InputFormat<?, ?>> formats =
            instantiateFormats(context, paths.keySet());
        Job temporaryJob = JobCompatibility.newJob(context.getConfiguration());
        List<StageInputSplit> results = new ArrayList<StageInputSplit>();
        for (Map.Entry<FormatAndMapper, List<StageInput>> entry : paths.entrySet()) {
            FormatAndMapper formatAndMapper = entry.getKey();
            List<StageInput> current = entry.getValue();
            InputFormat<?, ?> format = formats.get(formatAndMapper.formatClass);


            TestContext context) throws IOException {
        LOG.info("Retrieving export results: {}", description);
        VariableTable variables = createVariables(context);
        checkType(definition, description);
        Configuration conf = configurations.newInstance();
        Job job = JobCompatibility.newJob(conf);
        String resolved = variables.parse(description.getPathPrefix(), false);
        FileInputFormat.setInputPaths(job, new Path(resolved));
        TaskAttemptContext taskContext = JobCompatibility.newTaskAttemptContext(
                job.getConfiguration(),
                JobCompatibility.newTaskAttemptId(JobCompatibility.newTaskId(JobCompatibility.newJobId())));
        FileInputFormat<?, V> format = getOpposite(conf, description.getOutputFormat());
        FileInputFormatDriver<V> result = new FileInputFormatDriver<V>(definition, taskContext, format);
        return result;
    }

            final String destination,
            FileOutputFormat<? super NullWritable, ? super V> output) throws IOException {
        assert destination != null;
        assert output != null;
        LOG.debug("Opening {} using {}", destination, output.getClass().getName());
        Job job = JobCompatibility.newJob(configuration);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(definition.getModelClass());
        final File temporaryDir = File.createTempFile("asakusa", ".tempdir");
        if (temporaryDir.delete() == false || temporaryDir.mkdirs() == false) {
            throw new IOException("Failed to create temporary directory");
        }
        LOG.debug("Using staging deploy target: {}", temporaryDir);
        URI uri = temporaryDir.toURI();
        FileOutputFormat.setOutputPath(job, new Path(uri));
        TaskAttemptContext context = JobCompatibility.newTaskAttemptContext(
                job.getConfiguration(),
                JobCompatibility.newTaskAttemptId(JobCompatibility.newTaskId(JobCompatibility.newJobId())));
        FileOutputFormatDriver<V> result = new FileOutputFormatDriver<V>(context, output, NullWritable.get()) {
            @Override
            public void close() throws IOException {
                super.close();
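
The two snippets above build a TaskAttemptContext through Asakusa's JobCompatibility helpers, which paper over differences between Hadoop API versions. A rough equivalent on a plain Hadoop 2.x classpath, sketched here as an assumption rather than the library's actual implementation, looks like this:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.TaskType;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class TaskContextSketch {
        // Builds a synthetic TaskAttemptContext for driving an InputFormat or OutputFormat
        // outside of a running MapReduce job (for example, in tests or local tooling).
        static TaskAttemptContext newLocalContext(Configuration conf) {
            TaskAttemptID attemptId = new TaskAttemptID("local", 0, TaskType.MAP, 0, 0);
            return new TaskAttemptContextImpl(conf, attemptId);
        }
    }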

  // Builds the job chain: cjob3 depends on cjob1 and cjob2, and cjob4 depends on cjob3.
  private JobControl createDependencies(Configuration conf, Job job1)
      throws Exception {
    List<ControlledJob> dependingJobs = null;
    cjob1 = new ControlledJob(job1, dependingJobs);
    Job job2 = MapReduceTestUtil.createCopyJob(conf, outdir_2, indir);
    cjob2 = new ControlledJob(job2, dependingJobs);

    Job job3 = MapReduceTestUtil.createCopyJob(conf, outdir_3,
                                     outdir_1, outdir_2);
    dependingJobs = new ArrayList<ControlledJob>();
    dependingJobs.add(cjob1);
    dependingJobs.add(cjob2);
    cjob3 = new ControlledJob(job3, dependingJobs);

    Job job4 = MapReduceTestUtil.createCopyJob(conf, outdir_4, outdir_3);
    dependingJobs = new ArrayList<ControlledJob>();
    dependingJobs.add(cjob3);
    cjob4 = new ControlledJob(job4, dependingJobs);

    JobControl theControl = new JobControl("Test");
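
For reference, a self-contained sketch of the JobControl pattern these tests exercise: two already-configured Job instances are wrapped in ControlledJob objects, chained, and driven from a background thread. The method, group name, and polling interval are arbitrary choices for illustration, not taken from the test code.

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
    import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;

    public class JobChainSketch {
        // jobA and jobB are assumed to be fully configured Jobs; jobB must not start before jobA succeeds.
        static void runChain(Job jobA, Job jobB) throws Exception {
            ControlledJob cjobA = new ControlledJob(jobA, null);
            List<ControlledJob> deps = new ArrayList<ControlledJob>();
            deps.add(cjobA);
            ControlledJob cjobB = new ControlledJob(jobB, deps);

            JobControl control = new JobControl("example-group");
            control.addJob(cjobA);
            control.addJob(cjobB);

            // JobControl is a Runnable that submits each job once its dependencies have completed.
            Thread thread = new Thread(control);
            thread.setDaemon(true);
            thread.start();
            while (!control.allFinished()) {
                Thread.sleep(500);
            }
            control.stop();
        }
    }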

    Configuration conf = createJobConf();

    cleanupData(conf);
   
    // create a Fail job
    Job job1 = MapReduceTestUtil.createFailJob(conf, outdir_1, indir);
   
    // create job dependencies
    JobControl theControl = createDependencies(conf, job1);
   
    // wait till all the jobs complete

  public void testJobControlWithKillJob() throws Exception {
    LOG.info("Starting testJobControlWithKillJob");

    Configuration conf = createJobConf();
    cleanupData(conf);
    Job job1 = MapReduceTestUtil.createKillJob(conf, outdir_1, indir);
    JobControl theControl = createDependencies(conf, job1);

    while (cjob1.getJobState() != ControlledJob.State.RUNNING) {
      try {
        Thread.sleep(100);

    Configuration conf = createJobConf();

    cleanupData(conf);
   
    Job job1 = MapReduceTestUtil.createCopyJob(conf, outdir_1, indir);
   
    JobControl theControl = createDependencies(conf, job1);
   
    // wait till all the jobs complete
    waitTillAllFinished(theControl);

  public void testControlledJob() throws Exception {
    LOG.info("Starting testControlledJob");

    Configuration conf = createJobConf();
    cleanupData(conf);
    Job job1 = MapReduceTestUtil.createCopyJob(conf, outdir_1, indir);
    JobControl theControl = createDependencies(conf, job1);
    while (cjob1.getJobState() != ControlledJob.State.RUNNING) {
      try {
        Thread.sleep(100);
      } catch (InterruptedException e) {

  @Test
  public void testErrorWhileSubmitting() throws Exception {
    JobControl jobControl = new JobControl("Test");
   
    Job mockJob = mock(Job.class);
   
    ControlledJob job1 = new ControlledJob(mockJob, null);
    when(mockJob.getConfiguration()).thenReturn(new Configuration());
    doThrow(new IncompatibleClassChangeError("This is a test")).when(mockJob).submit();
   
    jobControl.addJob(job1);
   
    runJobControl(jobControl);

  }
 
  private Job createJob(boolean complete, boolean successful)
    throws IOException, InterruptedException {
    // Create a stub Job that responds in a controlled way
    Job mockJob = mock(Job.class);
    when(mockJob.getConfiguration()).thenReturn(new Configuration());
    when(mockJob.isComplete()).thenReturn(complete);
    when(mockJob.isSuccessful()).thenReturn(successful);
    return mockJob;
  }
