Package org.apache.hadoop.mapred

Examples of org.apache.hadoop.mapred.JobClient

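JobClient is the old org.apache.hadoop.mapred API's entry point for submitting jobs and querying the cluster. As a baseline for the excerpts below, here is a minimal sketch of the usual submit-and-poll pattern; the class name and job setup are placeholders, not taken from any of the projects quoted on this page:

    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.RunningJob;

    public class JobClientExample {                          // hypothetical driver class
        public static void main(String[] args) throws Exception {
            JobConf jobConf = new JobConf(JobClientExample.class);
            jobConf.setJobName("jobclient-example");
            // ... set input/output paths, mapper/reducer, key/value classes ...

            JobClient client = new JobClient(jobConf);
            try {
                RunningJob job = client.submitJob(jobConf); // asynchronous submit
                while (!job.isComplete()) {                 // poll until the job finishes
                    Thread.sleep(1000);
                }
                System.out.println("succeeded: " + job.isSuccessful());
            } finally {
                client.close();                             // release the JobTracker connection
            }
        }
    }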

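Sizing a copy job: cap the number of maps and create one SequenceFile of work per map (a distcp-style file copier):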
      int numMaps = finalPathList.size();
      if (numMaps > MAX_NUM_MAPS) {
        numMaps = MAX_NUM_MAPS;
      }

      JobClient client = new JobClient(jobConf);
      jobConf.setNumMapTasks(getMapCount(numMaps, totalBytes, client));
     
      for(int idx=0; idx < numMaps; ++idx) {
        Path file = new Path(inDir, "part"+idx);
        SequenceFile.Writer writer =
      // ... (snippet truncated)


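Configuring an HTTP-based copy job: SequenceFile output, a copy mapper, and a map count derived from the number of source paths: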
      jobConf.setOutputValueClass(Text.class);
      jobConf.setOutputFormat(SequenceFileOutputFormat.class);
     
      jobConf.setMapperClass(HTTPCopyFilesMapper.class);
     
      JobClient client = new JobClient(jobConf);
      jobConf.setNumMapTasks(getMapCount(srcPaths.length, -1, client));
     
      jobConf.setBoolean(readFailuresAttribute, ignoreReadFailures);
     
      FileSystem fileSystem = FileSystem.get(conf);
      // ... (snippet truncated)

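Recording distcp bookkeeping counters, then sizing the map count from the live task-tracker count: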
    }

    jobConf.setInt("distcp.file.count", cnfiles);
    jobConf.setLong("distcp.total.size", cbsize);

    JobClient client = new JobClient(jobConf);
    jobConf.setNumMapTasks(getMapCount(cbsize,
          client.getClusterStatus().getTaskTrackers()));
  }
    // ... (snippet truncated)
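
All three excerpts above size the job through a getMapCount helper that this page does not show (the first two use a three-argument overload). One plausible shape for the two-argument form, capping maps by both data volume and cluster capacity; the constants and formula here are illustrative assumptions, not the actual distcp code:

    // Illustrative sketch only -- not the real distcp helper.
    private static final long BYTES_PER_MAP = 256 * 1024 * 1024; // assumed 256 MB of data per map
    private static final int MAPS_PER_TRACKER = 10;              // assumed per-tracker fan-out

    static int getMapCount(long totalBytes, int numTaskTrackers) {
        int byBytes = (int) Math.max(1, totalBytes / BYTES_PER_MAP);
        int byCluster = Math.max(1, numTaskTrackers * MAPS_PER_TRACKER);
        return Math.min(byBytes, byCluster);
    }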

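Hive's ExecDriver wraps JobClient.getClusterStatus() to report cluster state: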
  public ClusterStatus getClusterStatus() throws Exception {
    ClusterStatus cs;
    try {
      JobConf job = new JobConf(conf, ExecDriver.class);
      JobClient jc = new JobClient(job);
      cs = jc.getClusterStatus();
    } catch (Exception e) {
      e.printStackTrace();
      throw e;
    }
    LOG.info("Returning cluster status: " + cs.toString());
    // ... (snippet truncated)
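
The ClusterStatus returned here carries the tracker and slot counts that sizing code acts on; a short sketch of reading them with the old-API accessors:

    ClusterStatus cs = jc.getClusterStatus();
    int trackers   = cs.getTaskTrackers();     // live task trackers
    int maps       = cs.getMapTasks();         // currently running map tasks
    int reduces    = cs.getReduceTasks();      // currently running reduce tasks
    int maxMaps    = cs.getMaxMapTasks();      // cluster map slot capacity
    int maxReduces = cs.getMaxReduceTasks();   // cluster reduce slot capacity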

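An Oozie test that follows a launcher job through JobClient, waits for completion, and verifies the ID swap to the real MapReduce job: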
        ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(job, action, false, false);
        MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
        JobConf conf = actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action.getConf()));
        String user = conf.get("user.name");
        String group = conf.get("group.name");
        JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);

        String launcherId = action.getExternalId();

        final RunningJob launcherJob = jobClient.getJob(JobID.forName(launcherId));

        waitFor(120 * 1000, new Predicate() {
            public boolean evaluate() throws Exception {
                return launcherJob.isComplete();
            }
        });
        assertTrue(launcherJob.isSuccessful());
        assertTrue(LauncherMapper.hasIdSwap(launcherJob));

        new ActionCheckXCommand(action.getId()).call();
        action = jpaService.execute(wfActionGetCmd);
        String mapperId = action.getExternalId();

        assertFalse(launcherId.equals(mapperId));

        final RunningJob mrJob = jobClient.getJob(JobID.forName(mapperId));

        waitFor(120 * 1000, new Predicate() {
            public boolean evaluate() throws Exception {
                return mrJob.isComplete();
            }
        // ... (snippet truncated)
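
waitFor and Predicate in this test come from Oozie's test harness rather than Hadoop itself. A plain-Java equivalent of that bounded poll, written as a hypothetical helper:

    // Hypothetical stand-in for the test harness's waitFor(timeout, predicate).
    static boolean waitForCompletion(long timeoutMs, RunningJob job) throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
            if (job.isComplete()) {
                return true;
            }
            Thread.sleep(500);   // poll twice a second
        }
        return false;            // timed out
    }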

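A test helper that resolves a job ID to a RunningJob through Oozie's HadoopAccessorService: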
        JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(jobTracker);
        XConfiguration.copy(conf, jobConf);
        String user = jobConf.get("user.name");
        String group = jobConf.get("group.name");
        JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, jobConf);
        final RunningJob runningJob = jobClient.getJob(JobID.forName(jobId));
        assertNotNull(runningJob);
        return runningJob;
    }
    // ... (snippet truncated)

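Pig's launcher constructing a JobClient from the execution engine's JobConf before compiling the plan into jobs: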
        stats.setPhysicalPlan(php);
       
        ExecutionEngine exe = pc.getExecutionEngine();
        ConfigurationValidator.validatePigProperties(exe.getConfiguration());
        Configuration conf = ConfigurationUtil.toConfiguration(exe.getConfiguration());
        JobClient jobClient = new JobClient(((HExecutionEngine)exe).getJobConf());

        JobControlCompiler jcc = new JobControlCompiler(pc, conf);
       
        List<Job> failedJobs = new LinkedList<Job>();
        List<Job> completeFailedJobsInThisRun = new LinkedList<Job>();
        // ... (snippet truncated)

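A variant of the same lookup helper that also pins mapred.job.tracker on the JobConf: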
        assertNotNull(consoleUrl);

        JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(jobTracker);
        jobConf.set("mapred.job.tracker", jobTracker);

        JobClient jobClient =
            Services.get().get(HadoopAccessorService.class).createJobClient(getTestUser(), jobConf);
        final RunningJob runningJob = jobClient.getJob(JobID.forName(jobId));
        assertNotNull(runningJob);
        return runningJob;
    }
    // ... (snippet truncated)

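Oozie's action executor submitting (or recovering) a launcher job through JobClient: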
    void injectLauncherCallback(Context context, Configuration launcherConf) {
        injectCallback(context, launcherConf);
    }

    public void submitLauncher(FileSystem actionFs, Context context, WorkflowAction action) throws ActionExecutorException {
        JobClient jobClient = null;
        boolean exception = false;
        try {
            Path appPathRoot = new Path(context.getWorkflow().getAppPath());

            // app path could be a file
            if (actionFs.isFile(appPathRoot)) {
                appPathRoot = appPathRoot.getParent();
            }

            Element actionXml = XmlUtils.parseXml(action.getConf());

            // action job configuration
            Configuration actionConf = createBaseHadoopConf(context, actionXml);
            setupActionConf(actionConf, context, actionXml, appPathRoot);
            XLog.getLog(getClass()).debug("Setting LibFilesArchives ");
            setLibFilesArchives(context, actionXml, appPathRoot, actionConf);
            String jobName = XLog.format("oozie:action:T={0}:W={1}:A={2}:ID={3}", getType(), context.getWorkflow()
                    .getAppName(), action.getName(), context.getWorkflow().getId());
            actionConf.set("mapred.job.name", jobName);
            injectActionCallback(context, actionConf);

            if (context.getWorkflow().getAcl() != null) {
                // setting the group owning the Oozie job to allow anybody in that
                // group to kill the jobs.
                actionConf.set("mapreduce.job.acl-modify-job", context.getWorkflow().getAcl());
            }

            // Setting the credential properties in launcher conf
            HashMap<String, CredentialsProperties> credentialsProperties = setCredentialPropertyToActionConf(context,
                    action, actionConf);

            // Adding if action need to set more credential tokens
            JobConf credentialsConf = new JobConf(false);
            XConfiguration.copy(actionConf, credentialsConf);
            setCredentialTokens(credentialsConf, context, action, credentialsProperties);

            // insert conf to action conf from credentialsConf
            for (Entry<String, String> entry : credentialsConf) {
                if (actionConf.get(entry.getKey()) == null) {
                    actionConf.set(entry.getKey(), entry.getValue());
                }
            }

            JobConf launcherJobConf = createLauncherConf(actionFs, context, action, actionXml, actionConf);
            injectLauncherCallback(context, launcherJobConf);
            XLog.getLog(getClass()).debug("Creating Job Client for action " + action.getId());
            jobClient = createJobClient(context, launcherJobConf);
            String launcherId = LauncherMapper.getRecoveryId(launcherJobConf, context.getActionDir(), context
                    .getRecoveryId());
            boolean alreadyRunning = launcherId != null;
            RunningJob runningJob;

            // if user-retry is on, always submit new launcher
            boolean isUserRetry = ((WorkflowActionBean)action).isUserRetry();

            if (alreadyRunning && !isUserRetry) {
                runningJob = jobClient.getJob(JobID.forName(launcherId));
                if (runningJob == null) {
                    String jobTracker = launcherJobConf.get("mapred.job.tracker");
                    throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
                            "unknown job [{0}@{1}], cannot recover", launcherId, jobTracker);
                }
            }
            else {
                XLog.getLog(getClass()).debug("Submitting the job through Job Client for action " + action.getId());

                // setting up propagation of the delegation token.
                Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(new Text("mr token"));
                launcherJobConf.getCredentials().addToken(new Text("mr token"), mrdt);

                // insert credentials tokens to launcher job conf if needed
                if (needInjectCredentials()) {
                    for (Token<? extends TokenIdentifier> tk : credentialsConf.getCredentials().getAllTokens()) {
                        log.debug("ADDING TOKEN: " + tk.getKind().toString());
                        launcherJobConf.getCredentials().addToken(tk.getKind(), tk);
                    }
                }
                else {
                    log.info("No need to inject credentials.");
                }
                runningJob = jobClient.submitJob(launcherJobConf);
                if (runningJob == null) {
                    throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
                            "Error submitting launcher for action [{0}]", action.getId());
                }
                launcherId = runningJob.getID().toString();
                XLog.getLog(getClass()).debug("After submission get the launcherId " + launcherId);
            }

            String jobTracker = launcherJobConf.get(HADOOP_JOB_TRACKER);
            String consoleUrl = runningJob.getTrackingURL();
            context.setStartData(launcherId, jobTracker, consoleUrl);
        }
        catch (Exception ex) {
            exception = true;
            throw convertException(ex);
        }
        finally {
            if (jobClient != null) {
                try {
                    jobClient.close();
                }
                catch (Exception e) {
                    if (exception) {
                        log.error("JobClient error: ", e);
                    }
        // ... (snippet truncated)
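
Note the shape of that finally block: the JobClient is always closed, but a close failure is only logged when the action already threw, so cleanup errors never mask the original exception; the elided else branch presumably surfaces the close failure in its own right.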

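The matching check() method: look up the job by external ID, follow the launcher-to-MapReduce ID swap, and harvest output or error data: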
        return Services.get().get(HadoopAccessorService.class).createJobClient(user, jobConf);
    }

    @Override
    public void check(Context context, WorkflowAction action) throws ActionExecutorException {
        JobClient jobClient = null;
        boolean exception = false;
        try {
            Element actionXml = XmlUtils.parseXml(action.getConf());
            FileSystem actionFs = getActionFileSystem(context, actionXml);
            JobConf jobConf = createBaseHadoopConf(context, actionXml);
            jobClient = createJobClient(context, jobConf);
            RunningJob runningJob = jobClient.getJob(JobID.forName(action.getExternalId()));
            if (runningJob == null) {
                context.setExternalStatus(FAILED);
                context.setExecutionData(FAILED, null);
                throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
                        "Unknown hadoop job [{0}] associated with action [{1}].  Failing this action!", action
                                .getExternalId(), action.getId());
            }
            if (runningJob.isComplete()) {
                Path actionDir = context.getActionDir();

                String user = context.getWorkflow().getUser();
                String group = context.getWorkflow().getGroup();
                if (LauncherMapper.hasIdSwap(runningJob, user, group, actionDir)) {
                    String launcherId = action.getExternalId();
                    Path idSwapPath = LauncherMapper.getIdSwapPath(context.getActionDir());
                    InputStream is = actionFs.open(idSwapPath);
                    BufferedReader reader = new BufferedReader(new InputStreamReader(is));
                    Properties props = PropertiesUtils.readProperties(reader, maxActionOutputLen);
                    reader.close();
                    String newId = props.getProperty("id");
                    runningJob = jobClient.getJob(JobID.forName(newId));
                    if (runningJob == null) {
                        context.setExternalStatus(FAILED);
                        throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
                                "Unknown hadoop job [{0}] associated with action [{1}].  Failing this action!", newId,
                                action.getId());
                    }

                    context.setStartData(newId, action.getTrackerUri(), runningJob.getTrackingURL());
                    XLog.getLog(getClass()).info(XLog.STD, "External ID swap, old ID [{0}] new ID [{1}]", launcherId,
                            newId);
                }
                if (runningJob.isComplete()) {
                    XLog.getLog(getClass()).info(XLog.STD, "action completed, external ID [{0}]",
                            action.getExternalId());
                    if (runningJob.isSuccessful() && LauncherMapper.isMainSuccessful(runningJob)) {
                        getActionData(actionFs, runningJob, action, context);
                        XLog.getLog(getClass()).info(XLog.STD, "action produced output");
                    }
                    else {
                        XLog log = XLog.getLog(getClass());
                        String errorReason;
                        Path actionError = LauncherMapper.getErrorPath(context.getActionDir());
                        if (actionFs.exists(actionError)) {
                            InputStream is = actionFs.open(actionError);
                            BufferedReader reader = new BufferedReader(new InputStreamReader(is));
                            Properties props = PropertiesUtils.readProperties(reader, -1);
                            reader.close();
                            String errorCode = props.getProperty("error.code");
                            if (errorCode.equals("0")) {
                                errorCode = "JA018";
                            }
                            if (errorCode.equals("-1")) {
                                errorCode = "JA019";
                            }
                            errorReason = props.getProperty("error.reason");
                            log.warn("Launcher ERROR, reason: {0}", errorReason);
                            String exMsg = props.getProperty("exception.message");
                            String errorInfo = (exMsg != null) ? exMsg : errorReason;
                            context.setErrorInfo(errorCode, errorInfo);
                            String exStackTrace = props.getProperty("exception.stacktrace");
                            if (exMsg != null) {
                                log.warn("Launcher exception: {0}{E}{1}", exMsg, exStackTrace);
                            }
                        }
                        else {
                            errorReason = XLog.format("LauncherMapper died, check Hadoop log for job [{0}:{1}]", action
                                    .getTrackerUri(), action.getExternalId());
                            log.warn(errorReason);
                        }
                        context.setExecutionData(FAILED_KILLED, null);
                    }
                }
                else {
                    context.setExternalStatus(RUNNING);
                    XLog.getLog(getClass()).info(XLog.STD, "checking action, external ID [{0}] status [{1}]",
                            action.getExternalId(), action.getExternalStatus());
                }
            }
            else {
                context.setExternalStatus(RUNNING);
                XLog.getLog(getClass()).info(XLog.STD, "checking action, external ID [{0}] status [{1}]",
                        action.getExternalId(), action.getExternalStatus());
            }
        }
        catch (Exception ex) {
            XLog.getLog(getClass()).warn("Exception in check(). Message[{0}]", ex.getMessage(), ex);
            exception = true;
            throw convertException(ex);
        }
        finally {
            if (jobClient != null) {
                try {
                    jobClient.close();
                }
                catch (Exception e) {
                    if (exception) {
                        log.error("JobClient error: ", e);
                    }
        // ... (snippet truncated)
