Package org.apache.oozie.client

Examples of org.apache.oozie.client.WorkflowAction
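For orientation, here is a minimal sketch of how client code obtains WorkflowAction instances in the first place, before the action-executor test excerpts below. The server URL and workflow job id are placeholders, and error handling is omitted:

import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;

public class ListWorkflowActions {
    public static void main(String[] args) throws Exception {
        // Hypothetical URL and job id; substitute real values.
        OozieClient client = new OozieClient("http://localhost:11000/oozie");
        WorkflowJob job = client.getJobInfo("0000001-180101000000000-oozie-W");

        // Each action carries its status plus the ids of the Hadoop jobs it
        // launched, which the test excerpts below assert against.
        for (WorkflowAction action : job.getActions()) {
            System.out.println(action.getName() + " [" + action.getType() + "] "
                    + action.getStatus()
                    + " externalId=" + action.getExternalId()
                    + " externalStatus=" + action.getExternalStatus());
        }
    }
}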


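Submitting a Pig action: a test helper that prepares the action directory, submits the launcher via PigActionExecutor, and asserts that the action's external id, tracker URI, and console URL are populated before rebuilding its configuration from the action XML: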

    private RunningJob submitAction(Context context) throws Exception {
        PigActionExecutor ae = new PigActionExecutor();

        WorkflowAction action = context.getAction();

        ae.prepareActionDir(getFileSystem(), context);
        ae.submitLauncher(getFileSystem(), context, action);

        String jobId = action.getExternalId();
        String jobTracker = action.getTrackerUri();
        String consoleUrl = action.getConsoleUrl();
        assertNotNull(jobId);
        assertNotNull(jobTracker);
        assertNotNull(consoleUrl);

        Element e = XmlUtils.parseXml(action.getConf());
        XConfiguration conf =
                new XConfiguration(new StringReader(XmlUtils.prettyPrint(e.getChild("configuration")).toString()));
        conf.set("mapred.job.tracker", e.getChildTextTrim("job-tracker"));
        conf.set("fs.default.name", e.getChildTextTrim("name-node"));
        conf.set("mapreduce.framework.name", "yarn");
        // ... (remainder of method omitted)
    }
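Checking execution stats (fragment): once the launcher succeeds with stats data, check() and end() are called, and the JSON payload returned by getStats() is parsed and probed for expected keys, including one per external child id: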


        evaluateLauncherJob(launcherJob);
        assertTrue(launcherJob.isSuccessful());
        assertTrue(LauncherMapper.hasStatsData(launcherJob));

        PigActionExecutor ae = new PigActionExecutor();
        WorkflowAction wfAction = context.getAction();
        ae.check(context, wfAction);
        ae.end(context, wfAction);

        assertEquals("SUCCEEDED", wfAction.getExternalStatus());
        String stats = wfAction.getStats();
        assertNotNull(stats);
        // check for some of the expected key values in the stats
        Map m = (Map)JSONValue.parse(stats);
        // check for expected 1st level JSON keys
        assertTrue(m.containsKey("PIG_VERSION"));

        String expectedChildIDs = wfAction.getExternalChildIDs();
        String[] childIDs = expectedChildIDs.split(",");
        assertTrue(m.containsKey(childIDs[0]));

        Map q = (Map)m.get(childIDs[0]);
        // check for expected 2nd level JSON keys
        // ... (remainder omitted)
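Verifying external child ids: a succeeded Pig action should report the Hadoop job ids of its child jobs through getExternalChildIDs():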

        final RunningJob launcherJob = submitAction(context);
        evaluateLauncherJob(launcherJob);
        assertTrue(launcherJob.isSuccessful());

        PigActionExecutor ae = new PigActionExecutor();
        WorkflowAction wfAction = context.getAction();
        ae.check(context, wfAction);
        ae.end(context, wfAction);

        assertEquals("SUCCEEDED", wfAction.getExternalStatus());
        String externalIds = wfAction.getExternalChildIDs();
        assertNotNull(externalIds);
        assertFalse(externalIds.isEmpty());
        // check for the expected prefix of hadoop jobIDs
        assertTrue(externalIds.contains("job_"));
        // ... (remainder omitted)
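Exceeding the stats size limit: the action ends as FAILED/KILLED and getStats() returns null: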

        final RunningJob launcherJob = submitAction(context);
        evaluateLauncherJob(launcherJob);
        assertTrue(launcherJob.isSuccessful());

        PigActionExecutor ae = new PigActionExecutor();
        WorkflowAction wfAction = context.getAction();
        ae.check(context, wfAction);
        ae.end(context, wfAction);

        // the action should fail because Pig stats are always larger than
        // the 1-byte max-stats-size limit used in this test
        assertEquals("FAILED/KILLED", wfAction.getExternalStatus());
        assertNull(wfAction.getStats());
    }
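Succeeding without stats (fragment): when the launcher reports no stats data, the action still succeeds and its external child ids remain available: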

        evaluateLauncherJob(launcherJob);
        assertTrue(launcherJob.isSuccessful());
        assertFalse(LauncherMapper.hasStatsData(launcherJob));

        PigActionExecutor ae = new PigActionExecutor();
        WorkflowAction wfAction = context.getAction();
        ae.check(context, wfAction);
        ae.end(context, wfAction);

        assertEquals("SUCCEEDED", wfAction.getExternalStatus());
        assertNotNull(wfAction.getExternalChildIDs());
    }
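An FS action fragment (the opening of the actionXml literal is cut off): the action is started after asserting that no recovery path exists yet: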

                "<chmod path=''{5}'' permissions=''-rwxrwx---'' dir-files=''false''/>" +
                "</fs>", mkdir, delete, source, target, chmod1, chmod2);


        Context context = createContext(actionXml);
        WorkflowAction action = context.getAction();

        assertFalse(fs.exists(ae.getRecoveryPath(context)));

        ae.start(context, action);
        // ... (remainder omitted)
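Starting an FS action after assigning explicit ids to the workflow and action beans (the enclosing try block is cut off):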

        Context context = createContext(actionXml);
        ((WorkflowJobBean) context.getWorkflow()).setId(id);
        ((WorkflowActionBean) context.getWorkflow().getActions().get(0)).setJobId(id);
        ((WorkflowActionBean) context.getWorkflow().getActions().get(0)).setId(id + "-FS");

        WorkflowAction action = context.getAction();

        assertFalse(fs.exists(ae.getRecoveryPath(context)));

        try {
            ae.start(context, action);
            // ... (remainder of try block omitted)
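The same submitAction() pattern for a DistCp action, ending with a JobConf obtained through HadoopAccessorService: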


    protected RunningJob submitAction(Context context) throws Exception {
        DistcpActionExecutor ae = new DistcpActionExecutor();

        WorkflowAction action = context.getAction();

        ae.prepareActionDir(getFileSystem(), context);
        ae.submitLauncher(getFileSystem(), context, action);

        String jobId = action.getExternalId();
        String jobTracker = action.getTrackerUri();
        String consoleUrl = action.getConsoleUrl();
        assertNotNull(jobId);
        assertNotNull(jobTracker);
        assertNotNull(consoleUrl);

        JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(jobTracker);
        // ... (remainder of method omitted)

    }
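The pattern again for a plain Java action: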

    protected RunningJob submitAction(Context context) throws Exception {
        JavaActionExecutor ae = new JavaActionExecutor();

        WorkflowAction action = context.getAction();

        ae.prepareActionDir(getFileSystem(), context);
        ae.submitLauncher(getFileSystem(), context, action);

        String jobId = action.getExternalId();
        String jobTracker = action.getTrackerUri();
        String consoleUrl = action.getConsoleUrl();
        assertNotNull(jobId);
        assertNotNull(jobTracker);
        assertNotNull(consoleUrl);

        JobConf jobConf = Services.get().get(HadoopAccessorService.class).createJobConf(jobTracker);
        // ... (remainder of method omitted)

    }
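And for a Hive action, where the job-tracker, name-node, and configuration elements are resolved against the uri:oozie:hive-action:0.2 namespace: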

    private RunningJob submitAction(Context context) throws Exception {
        HiveActionExecutor ae = new HiveActionExecutor();

        WorkflowAction action = context.getAction();

        ae.prepareActionDir(getFileSystem(), context);
        ae.submitLauncher(getFileSystem(), context, action);

        String jobId = action.getExternalId();
        String jobTracker = action.getTrackerUri();
        String consoleUrl = action.getConsoleUrl();
        assertNotNull(jobId);
        assertNotNull(jobTracker);
        assertNotNull(consoleUrl);
        Element e = XmlUtils.parseXml(action.getConf());
        Namespace ns = Namespace.getNamespace("uri:oozie:hive-action:0.2");
        XConfiguration conf =
                new XConfiguration(new StringReader(XmlUtils.prettyPrint(e.getChild("configuration", ns)).toString()));
        conf.set("mapred.job.tracker", e.getChildTextTrim("job-tracker", ns));
        conf.set("fs.default.name", e.getChildTextTrim("name-node", ns));
        // ... (remainder of method omitted)
    }
