Package org.apache.oozie.command.wf, class ActionXCommand

Examples of org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext
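
ActionExecutorContext is the ActionExecutor.Context implementation that ActionXCommand hands to action executors: it wraps a WorkflowJobBean and a WorkflowActionBean together with the isRetry/isUserRetry flags, and exposes helpers such as getELEvaluator(), getAppFileSystem() and getActionDir(). Below is a minimal sketch of the construction pattern that recurs in the examples on this page; the sketch's class and method names are illustrative, and it assumes a running Oozie Services instance with the job and action beans already loaded (for example through the JPA executors used further down).

    // Minimal sketch (not from the Oozie sources): assumes Services has been
    // initialized and that the job/action beans were loaded elsewhere.
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.oozie.WorkflowActionBean;
    import org.apache.oozie.WorkflowJobBean;
    import org.apache.oozie.action.hadoop.MapReduceActionExecutor;
    import org.apache.oozie.command.wf.ActionXCommand;
    import org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext;
    import org.apache.oozie.service.HadoopAccessorService;
    import org.apache.oozie.service.Services;
    import org.apache.oozie.util.XmlUtils;

    public class ActionExecutorContextSketch {
        public static JobClient jobClientFor(WorkflowJobBean job, WorkflowActionBean action) throws Exception {
            // Wrap the job and action in an executor context (not a retry, not a user retry).
            ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(job, action, false, false);

            // Have the map-reduce executor build the base Hadoop configuration from the action XML.
            MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
            JobConf conf = actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action.getConf()));

            // Obtain a JobClient on behalf of the workflow user.
            String user = conf.get("user.name");
            return Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);
        }
    }

The test fragments that follow use this same pattern to reach the Hadoop launcher job that Oozie submits for a map-reduce action.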


        WorkflowActionGetJPAExecutor wfActionGetCmd = new WorkflowActionGetJPAExecutor(action.getId());

        // Start the map-reduce action and reload its bean from the store.
        new ActionStartXCommand(action.getId(), "map-reduce").call();
        action = jpaService.execute(wfActionGetCmd);

        // Build an executor context for the started action (not a retry, not a user retry),
        // derive the Hadoop configuration from the action XML, and create a JobClient
        // on behalf of the workflow user.
        ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(job, action, false, false);
        MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
        JobConf conf = actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action.getConf()));
        String user = conf.get("user.name");
        String group = conf.get("group.name");
        JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);
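
In the tests these fragments are taken from, the JobClient obtained through the context is then typically used to look up the launcher job by the action's external id and wait for it to finish. A sketch of that follow-up step is shown below; the plain polling loop stands in for the test harness's own wait helper and is an assumption, as is the JUnit assertion.

        // Sketch: also needs org.apache.hadoop.mapred.JobID and org.apache.hadoop.mapred.RunningJob;
        // "action" and "jobClient" are the variables set up above.
        RunningJob launcherJob = jobClient.getJob(JobID.forName(action.getExternalId()));
        while (!launcherJob.isComplete()) {
            Thread.sleep(1000);
        }
        assertTrue(launcherJob.isSuccessful());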


            if (wfAction != null) { // wfAction could be a no-op job
                NodeDef nodeDef = workflowInstance.getNodeDef(wfAction.getExecutionPath());
                if (nodeDef != null && nodeDef instanceof KillNodeDef) {
                    boolean isRetry = false;
                    boolean isUserRetry = false;
                    ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry,
                            isUserRetry);
                    try {
                        // Resolve the kill node's message with the context's EL evaluator and
                        // record it as the action's error message.
                        String tmpNodeConf = nodeDef.getConf();
                        String actionConf = context.getELEvaluator().evaluate(tmpNodeConf, String.class);
                        LOG.debug("Try to resolve KillNode message for jobid [{0}], actionId [{1}], before resolve [{2}], after resolve [{3}]",
                                        jobId, actionId, tmpNodeConf, actionConf);
                        if (wfAction.getErrorCode() != null) {
                            wfAction.setErrorInfo(wfAction.getErrorCode(), actionConf);
                        }

        });
        action = jpaService.execute(wfActionGetCmd);
        assertNotNull(action.getExternalId());
        assertEquals(WorkflowAction.Status.RUNNING, action.getStatus());

        ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(job, action, false, false);
        MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
        JobConf conf = actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action.getConf()));
        String user = conf.get("user.name");
        String group = conf.get("group.name");
        JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);


        // start workflow action
        new ActionStartXCommand(action.getId(), "map-reduce").call();
        action = jpaService.execute(wfActionGetCmd);
        assertNotNull(action.getExternalId());

        ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(job, action, false, false);
        MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
        JobConf conf = actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action.getConf()));
        String user = conf.get("user.name");
        String group = conf.get("group.name");
        JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);


        // It should now continue and finish with SUCCEEDED
        new ResumeXCommand(jobId).call();
        WorkflowJobBean job2 = jpaService.execute(new WorkflowJobGetJPAExecutor(jobId));
        assertEquals("RUNNING", job2.getStatusStr());

        ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(job2, action1, false, false);
        WorkflowActionBean action2 = jpaService.execute(wfActionGetCmd);
        MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
        JobConf conf = actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action2.getConf()));
        String user = conf.get("user.name");
        JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);

        new ActionStartXCommand(actionId, "map-reduce").call();
        final WorkflowActionBean action1 = jpaService.execute(wfActionGetCmd);
        String originalLauncherId = action1.getExternalId();

        ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(job0, action1, false, false);
        MapReduceActionExecutor actionExecutor = new MapReduceActionExecutor();
        JobConf conf = actionExecutor.createBaseHadoopConf(context, XmlUtils.parseXml(action1.getConf()));
        String user = conf.get("user.name");
        JobClient jobClient = Services.get().get(HadoopAccessorService.class).createJobClient(user, conf);

                            if (!action.getType().equals(StartActionExecutor.TYPE) &&       // The control actions have invalid
                                !action.getType().equals(ForkActionExecutor.TYPE) &&        // action dir paths because they
                                !action.getType().equals(JoinActionExecutor.TYPE) &&        // contain ":" (colons)
                                !action.getType().equals(KillActionExecutor.TYPE) &&
                                !action.getType().equals(EndActionExecutor.TYPE)) {
                                ActionExecutorContext context =
                                        new ActionXCommand.ActionExecutorContext(workflow, action, false, false);
                                if (context.getAppFileSystem().exists(context.getActionDir())) {
                                    context.getAppFileSystem().delete(context.getActionDir(), true);
                                }
                            }
                            queue(new ActionStartXCommand(action.getId(), action.getType()));
                        }
                        else {
