Package org.apache.hadoop.mapreduce.v2.app

Examples of org.apache.hadoop.mapreduce.v2.app.AppContext


import com.google.inject.Injector;

public class TestAMWebApp {

  @Test public void testAppControllerIndex() {
    AppContext ctx = new MockAppContext(0, 1, 1, 1);
    Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
    AppController controller = injector.getInstance(AppController.class);
    controller.index();
    assertEquals(ctx.getApplicationID().toString(), controller.get(APP_ID,""));
  }
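MockAppContext is the in-memory stand-in for AppContext used throughout these examples. As a hypothetical illustration (not part of TestAMWebApp), the sketch below shows what that mock exposes; the constructor-argument meaning (application id, number of jobs, tasks per job, attempts per task) is an assumption inferred from how the tests here iterate getAllJobs(), getTasks() and getAttempts().

  @Test public void exploreMockContext() {
    // Sketch only: constructor-argument meaning assumed as described above.
    AppContext ctx = new MockAppContext(0, 1, 1, 1);
    Job onlyJob = ctx.getAllJobs().values().iterator().next();
    Task onlyTask = onlyJob.getTasks().values().iterator().next();
    assertEquals(1, ctx.getAllJobs().size());
    assertEquals(1, onlyJob.getTasks().size());
    assertEquals(1, onlyTask.getAttempts().size());
  }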

 
  @Test public void testJobView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 1);
    Map<String, String> params = getJobParams(appContext);
    WebAppTests.testPage(JobPage.class, AppContext.class, appContext, params);
  }
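The getJobParams helper is defined elsewhere in TestAMWebApp and is not shown in these excerpts. A plausible sketch, assuming it only maps AMParams.JOB_ID to the first mock job's id (java.util.HashMap, JobId and MRApps assumed imported):

  public static Map<String, String> getJobParams(AppContext appContext) {
    // Assumption: the job-level pages only need the job id parameter.
    JobId jobId = appContext.getAllJobs().keySet().iterator().next();
    Map<String, String> params = new HashMap<String, String>();
    params.put(AMParams.JOB_ID, MRApps.toString(jobId));
    return params;
  }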

  @Test public void testTasksView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 1);
    Map<String, String> params = getTaskParams(appContext);
    WebAppTests.testPage(TasksPage.class, AppContext.class, appContext, params);
  }
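Likewise, getTaskParams is not shown here. A plausible sketch, assuming it extends the job parameter with the first task's id and a task-type symbol; the exact parameter set and the "m" literal are assumptions, not the verbatim helper:

  public static Map<String, String> getTaskParams(AppContext appContext) {
    Job job = appContext.getAllJobs().values().iterator().next();
    Task task = job.getTasks().values().iterator().next();
    Map<String, String> params = new HashMap<String, String>();
    params.put(AMParams.JOB_ID, MRApps.toString(job.getID()));
    params.put(AMParams.TASK_ID, MRApps.toString(task.getID()));
    params.put(AMParams.TASK_TYPE, "m"); // map-task symbol (assumed here)
    return params;
  }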

  @Test public void testTaskView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 1);
    Map<String, String> params = getTaskParams(appContext);
    App app = new App(appContext);
    app.setJob(appContext.getAllJobs().values().iterator().next());
    app.setTask(app.getJob().getTasks().values().iterator().next());
    WebAppTests.testPage(TaskPage.class, App.class, app, params);
  }

  @Test public void testConfView() {
    WebAppTests.testPage(JobConfPage.class, AppContext.class,
                         new MockAppContext(0, 1, 1, 1));
  }

  @Test public void testCountersView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 1);
    Map<String, String> params = getJobParams(appContext);
    WebAppTests.testPage(CountersPage.class, AppContext.class,
                         appContext, params);
  }
 
  @Test public void testSingleCounterView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 1);
    Job job = appContext.getAllJobs().values().iterator().next();
    // add a failed task to the job without any counters
    Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true);
    Map<TaskId,Task> tasks = job.getTasks();
    tasks.put(failedTask.getID(), failedTask);
    Map<String, String> params = getJobParams(appContext);
    params.put(AMParams.COUNTER_GROUP,
        "org.apache.hadoop.mapreduce.FileSystemCounter");
    params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
    WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
                         appContext, params);
  }

  @Test public void testTaskCountersView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 1);
    Map<String, String> params = getTaskParams(appContext);
    WebAppTests.testPage(CountersPage.class, AppContext.class,
                         appContext, params);
  }

  @Test public void testSingleTaskCounterView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 2);
    Map<String, String> params = getTaskParams(appContext);
    params.put(AMParams.COUNTER_GROUP,
        "org.apache.hadoop.mapreduce.FileSystemCounter");
    params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
   
    // remove counters from one task attempt
    // to test handling of missing counters
    TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
    Job job = appContext.getJob(taskID.getJobId());
    Task task = job.getTask(taskID);
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    attempt.getReport().setCounters(null);
   
    WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
                         appContext, params);
  }
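The TASK_ID parameter is the string form of a TaskId, and MRApps.toTaskID parses it back. A small round-trip sketch (hypothetical test, assuming MRApps.toString(TaskId) is the inverse that produced the parameter in the first place):

  @Test public void taskIdRoundTrip() {
    // Sketch only: string <-> TaskId round trip via the MRApps helpers.
    AppContext appContext = new MockAppContext(0, 1, 1, 1);
    TaskId original = appContext.getAllJobs().values().iterator().next()
        .getTasks().keySet().iterator().next();
    String text = MRApps.toString(original);
    assertEquals(original, MRApps.toTaskID(text));
  }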

  /**
   * Test rendering for ConfBlock
   */
  @Test
  public void testConfigurationBlock() throws Exception {
    AppContext ctx = mock(AppContext.class);
    Job job = mock(Job.class);
    Path path = new Path("conf");
    Configuration configuration = new Configuration();
    configuration.set("Key for test", "Value for test");
    when(job.getConfFile()).thenReturn(path);
    when(job.loadConfFile()).thenReturn(configuration);

    when(ctx.getJob(any(JobId.class))).thenReturn(job);


    ConfBlockForTest configurationBlock = new ConfBlockForTest(ctx);
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
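The excerpt stops before the block is rendered. A minimal sketch of how such a block test typically finishes, assuming data is a ByteArrayOutputStream field of the test class (it is not declared in the excerpt) and that a render/flush/assert sequence is the intended continuation:

    // Assumed continuation, not the verbatim test body.
    configurationBlock.render(html);
    pWriter.flush();
    assertTrue(data.toString().contains("Key for test"));
    assertTrue(data.toString().contains("Value for test"));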

    when(task.getType()).thenReturn(TaskType.MAP);


    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    tasks.put(taskId, task);
    AppContext ctx = mock(AppContext.class);
    Job job = mock(Job.class);
    when(job.getTasks()).thenReturn(tasks);


    App app = new App(ctx);