Package cascading.stats.hadoop

Examples of cascading.stats.hadoop.HadoopStepStats
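The excerpts below come from an Ambrose-style Cascading FlowStepListener that reports each step's Hadoop job to a stats service. As a minimal, hedged sketch of how such a listener is wired up (assuming the Cascading 2.x Hadoop platform API; the class name LoggingStepListener is hypothetical):

import cascading.flow.Flow;
import cascading.flow.FlowStep;
import cascading.flow.FlowStepListener;
import cascading.stats.hadoop.HadoopStepStats;

// Hypothetical listener that logs the Hadoop job id assigned to each starting step.
public class LoggingStepListener implements FlowStepListener {

  @Override
  public void onStepStarting(FlowStep flowStep) {
    // On the Hadoop platform, each step's stats can be cast to HadoopStepStats.
    HadoopStepStats stats = (HadoopStepStats) flowStep.getFlowStepStats();
    System.out.println("step " + flowStep.getName() + " -> job " + stats.getJobID());
  }

  @Override
  public void onStepRunning(FlowStep flowStep) { }

  @Override
  public void onStepStopping(FlowStep flowStep) { }

  @Override
  public void onStepCompleted(FlowStep flowStep) { }

  @Override
  public boolean onStepThrowable(FlowStep flowStep, Throwable throwable) {
    return false; // false signals the throwable was not handled here
  }
}

The listener is registered on a flow with flow.addStepListener(new LoggingStepListener()) before flow.complete() is called.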


  /**
   * Called when the given flow step is starting; at this point the step's
   * MapReduce job has been submitted and assigned a job id.
   *
   * @param flowStep the step in the flow that represents the MapReduce job
   */
  @Override
  public void onStepStarting(FlowStep flowStep) {
    //getting Hadoop job client
    HadoopStepStats stats = (HadoopStepStats)((HadoopFlowStep)flowStep).getFlowStepStats();
    String assignedJobId = stats.getJobID();
    String jobName = flowStep.getName();
    JobClient jc = stats.getJobClient();

    runningJobs++; //update overall progress

    DAGNode<CascadingJob> node = this.dagNodeNameMap.get(jobName);
    if (node == null) {
      // ... remainder of this excerpt is truncated in the original listing
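With the JobClient and assigned job id from a starting step, the underlying Hadoop RunningJob can be looked up directly, for example to read its tracking URL. A sketch, assuming the classic org.apache.hadoop.mapred API that HadoopStepStats exposes (the helper name lookupRunningJob is hypothetical):

import java.io.IOException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
import cascading.stats.hadoop.HadoopStepStats;

// Hypothetical helper: resolve the RunningJob behind a starting step.
static RunningJob lookupRunningJob(HadoopStepStats stats) throws IOException {
  JobClient jc = stats.getJobClient();
  RunningJob job = jc.getJob(JobID.forName(stats.getJobID()));
  if (job != null) {
    System.out.println("tracking URL: " + job.getTrackingURL());
  }
  return job;
}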


  /**
   * Called when the given flow step has completed successfully.
   *
   * @param flowStep the step in the flow that represents the MapReduce job
   */
  @Override
  public void onStepCompleted(FlowStep flowStep) {
    HadoopStepStats stats = (HadoopStepStats)flowStep.getFlowStepStats();
    String jobId = stats.getJobID();

    //get job node
    DAGNode<CascadingJob> node = dagNodeJobIdMap.get(jobId);
    if (node == null) {
      log.warn("Unrecognized jobId reported for succeeded job: " + stats.getJobID());
      return;
    }
    mapReduceHelper.addMapReduceJobState(node.getJob(), stats.getJobClient());
    addCompletedJobStats(node.getJob(), stats);
    AmbroseUtils.pushEvent(statsWriteService, currentFlowId, new Event.JobFinishedEvent(node));
  }
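addCompletedJobStats presumably records the finished step's metrics; the Hadoop counters behind them can also be read straight off HadoopStepStats, which inherits the counter accessors of CascadingStats. A minimal sketch, assuming the Cascading 2.x counter API (the helper name dumpCounters is hypothetical):

import cascading.stats.hadoop.HadoopStepStats;

// Hypothetical helper: dump every counter captured for a completed step.
static void dumpCounters(HadoopStepStats stats) {
  for (String group : stats.getCounterGroups()) {
    for (String counter : stats.getCountersFor(group)) {
      System.out.println(group + "." + counter + " = " + stats.getCounterValue(group, counter));
    }
  }
}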

  /**
   * Called when the given flow step has failed with a Throwable.
   *
   * @param flowStep the step in the flow that represents the MapReduce job
   * @param throwable the exception that caused the job to fail
   * @return true if this listener has handled the throwable; false otherwise
   */
  @Override
  public boolean onStepThrowable(FlowStep flowStep, Throwable throwable) {
    HadoopStepStats stats = (HadoopStepStats)flowStep.getFlowStepStats();
    String jobName = flowStep.getName();

    //get job node
    DAGNode<CascadingJob> node = dagNodeNameMap.get(jobName);
    if (node == null) {
      log.warn("Unrecognized jobId reported for succeeded job: " + stats.getJobID());
      return false;
    }
    mapReduceHelper.addMapReduceJobState(node.getJob(), stats.getJobClient());
    addCompletedJobStats(node.getJob(), stats);
    AmbroseUtils.pushEvent(statsWriteService, currentFlowId, new Event.JobFailedEvent(node));
    return false;
  }
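When a step fails, the underlying Hadoop job usually carries the most useful detail. A hedged sketch of pulling task diagnostics through the step's RunningJob (assuming HadoopStepStats exposes it via getRunningJob(), as in Cascading 2.x; the helper name logFailedTasks is hypothetical):

import java.io.IOException;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TaskCompletionEvent;
import cascading.stats.hadoop.HadoopStepStats;

// Hypothetical helper: print diagnostics for failed task attempts of a failed step.
static void logFailedTasks(HadoopStepStats stats) throws IOException {
  RunningJob job = stats.getRunningJob();
  if (job == null) {
    return; // the job may never have been submitted
  }
  for (TaskCompletionEvent event : job.getTaskCompletionEvents(0)) {
    if (event.getTaskStatus() == TaskCompletionEvent.Status.FAILED) {
      for (String line : job.getTaskDiagnostics(event.getTaskAttemptId())) {
        System.err.println(line);
      }
    }
  }
}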

  /**
   * Called when the given flow step is running.
   *
   * @param flowStep the step in the flow that represents the MapReduce job
   */
  @Override
  public void onStepRunning(FlowStep flowStep) {
    //getting Hadoop running job and job client
    HadoopStepStats stats = (HadoopStepStats)flowStep.getFlowStepStats();
    JobClient jc = stats.getJobClient();

    // first we report the script's progress
    int progress = (int) (((runningJobs * 1.0) / totalNumberOfJobs) * 100);
    AmbroseUtils.pushWorkflowProgressEvent(statsWriteService, currentFlowId, progress);

    //get job node
    String jobId = stats.getJobID();
    DAGNode<CascadingJob> node = dagNodeJobIdMap.get(jobId);
    if (node == null) {
      log.warn("Unrecognized jobId reported for succeeded job: " + stats.getJobID());
      return;
    }
   
    //only push job progress events for a completed job once
    if (completedJobIds.contains(node.getJob().getId())) {
      // ... remainder of this excerpt is truncated in the original listing
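The workflow-level progress above is simply runningJobs over totalNumberOfJobs; per-step progress can be derived from the map and reduce phases of the underlying Hadoop job. A sketch, again assuming HadoopStepStats exposes the job via getRunningJob() (the helper name stepProgress is hypothetical):

import java.io.IOException;
import org.apache.hadoop.mapred.RunningJob;
import cascading.stats.hadoop.HadoopStepStats;

// Hypothetical helper: per-step progress as a percentage, averaging map and reduce phases.
static int stepProgress(HadoopStepStats stats) throws IOException {
  RunningJob job = stats.getRunningJob();
  if (job == null) {
    return 0; // not yet submitted
  }
  return (int) ((job.mapProgress() + job.reduceProgress()) / 2.0f * 100);
}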

  @Override
  protected FlowStepStats createStepStats( ClientState clientState )
    {
    return new HadoopStepStats( flowStep, clientState )
      {
      @Override
      public JobClient getJobClient()
        {
        return jobClient;
        }
      // ... remainder of this excerpt is truncated in the original listing
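The override above is how Cascading's Hadoop planner supplies a concrete HadoopStepStats per step. After a flow finishes, the same stats objects can be pulled back out of the flow's FlowStats; a sketch assuming the Cascading 2.x stats API, including getNumMapTasks()/getNumReduceTasks() (the helper name printStepTaskCounts is hypothetical):

import java.util.List;
import cascading.flow.Flow;
import cascading.stats.FlowStepStats;
import cascading.stats.hadoop.HadoopStepStats;

// Hypothetical helper: print Hadoop-level task counts for each step of a finished flow.
static void printStepTaskCounts(Flow flow) {
  List<FlowStepStats> stepStats = flow.getFlowStats().getFlowStepStats();
  for (FlowStepStats s : stepStats) {
    if (s instanceof HadoopStepStats) {
      HadoopStepStats hadoopStats = (HadoopStepStats) s;
      System.out.println(hadoopStats.getJobID()
          + " map tasks: " + hadoopStats.getNumMapTasks()
          + ", reduce tasks: " + hadoopStats.getNumReduceTasks());
    }
  }
}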
