Package org.apache.airavata.schemas.gfac

Examples of org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType

The snippets below come from Apache Airavata's GFac Hadoop handler and provider code. The first stages a local input directory into HDFS; the second configures and submits the MapReduce job described by the deployment description.


    private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException, IOException {
        ApplicationDeploymentDescriptionType appDepDesc =
                jobExecutionContext.getApplicationContext().getApplicationDeploymentDescription().getType();
        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
                (HadoopApplicationDeploymentDescriptionType) appDepDesc;
        // If an input data directory is set and lives on the local file system, stage it
        // into the HDFS input directory declared in the Hadoop job configuration.
        if (appDepDesc.isSetInputDataDirectory() && isInputDataDirectoryLocal(appDepDesc)) {
            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);
            FileSystem hdfs = FileSystem.get(hadoopConf);
            hdfs.copyFromLocalFile(new Path(appDepDesc.getInputDataDirectory()),
                    new Path(hadoopAppDesc.getHadoopJobConfiguration().getHdfsInputDirectory()));
        }
    }
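The handler also calls an isInputDataDirectoryLocal(...) helper that this page does not show. A minimal sketch of what such a check could look like, assuming "local" means any input directory whose URI has no scheme or an explicit file scheme (this is a hypothetical reconstruction, not Airavata's confirmed implementation):

    import java.net.URI;
    import java.net.URISyntaxException;

    // Hypothetical reconstruction for illustration only.
    private boolean isInputDataDirectoryLocal(ApplicationDeploymentDescriptionType appDepDesc) {
        String inputDir = appDepDesc.getInputDataDirectory();
        try {
            URI uri = new URI(inputDir);
            // A plain path (no scheme) or an explicit file:// URI points at the local file system
            return uri.getScheme() == null || "file".equals(uri.getScheme());
        } catch (URISyntaxException e) {
            // A bare OS path that does not parse as a URI is treated as local
            return true;
        }
    }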


    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
        HadoopApplicationDeploymentDescriptionType hadoopAppDesc =
                (HadoopApplicationDeploymentDescriptionType) jobExecutionContext
                        .getApplicationContext().getApplicationDeploymentDescription().getType();
        MessageContext inMessageContext = jobExecutionContext.getInMessageContext();
        HadoopApplicationDeploymentDescriptionType.HadoopJobConfiguration jobConf = hadoopAppDesc.getHadoopJobConfiguration();

        try {
            // Preparing Hadoop configuration
            Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(
                    jobExecutionContext, isWhirrBasedDeployment, hadoopConfigDir);

            // Load the jar containing the map-reduce job implementation into a child class loader
            ArrayList<URL> mapRedJars = new ArrayList<URL>();
            mapRedJars.add(new File(jobConf.getJarLocation()).toURI().toURL());
            URLClassLoader childClassLoader = new URLClassLoader(mapRedJars.toArray(new URL[mapRedJars.size()]),
                    this.getClass().getClassLoader());

            Job job = new Job(hadoopConf);

            job.setJobName(jobConf.getJobName());

            // All user-supplied classes are resolved by name through the child class loader
            job.setOutputKeyClass(Class.forName(jobConf.getOutputKeyClass(), true, childClassLoader));
            job.setOutputValueClass(Class.forName(jobConf.getOutputValueClass(), true, childClassLoader));

            job.setMapperClass((Class<? extends Mapper>) Class.forName(jobConf.getMapperClass(), true, childClassLoader));
            job.setCombinerClass((Class<? extends Reducer>) Class.forName(jobConf.getCombinerClass(), true, childClassLoader));
            // getReducerClass() is assumed to exist alongside getMapperClass()/getCombinerClass()
            // in the generated schema type; wiring the combiner class into setReducerClass would
            // silently drop the real reducer.
            job.setReducerClass((Class<? extends Reducer>) Class.forName(jobConf.getReducerClass(), true, childClassLoader));

            job.setInputFormatClass((Class<? extends InputFormat>) Class.forName(jobConf.getInputFormatClass(), true, childClassLoader));
            job.setOutputFormatClass((Class<? extends OutputFormat>) Class.forName(jobConf.getOutputFormatClass(), true, childClassLoader));

            FileInputFormat.setInputPaths(job, new Path(hadoopAppDesc.getInputDataDirectory()));
            FileOutputFormat.setOutputPath(job, new Path(hadoopAppDesc.getOutputDataDirectory()));

            // Submit the job, block until it completes, and report the tracking URL
            job.waitForCompletion(true);
            System.out.println(job.getTrackingURL());
            if (jobExecutionContext.getOutMessageContext() == null) {
                jobExecutionContext.setOutMessageContext(new MessageContext());
            }
        } catch (Exception e) {
            throw new GFacProviderException("Error occurred while executing the Hadoop job", e);
        }
    }
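Both snippets rely on HadoopUtils.createHadoopConfiguration(...), whose body the page does not include. Judging only from the call sites, one plausible shape is sketched below; the resource file names and the omitted Whirr branch are assumptions, not Airavata's confirmed implementation, and the JobExecutionContext parameter is dropped to keep the sketch self-contained:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public final class HadoopUtilsSketch {
        // The real helper also receives the JobExecutionContext (presumably to reach
        // Whirr cluster details); that parameter is omitted in this sketch.
        public static Configuration createHadoopConfiguration(boolean isWhirrBasedDeployment,
                                                              String hadoopConfigDir) {
            Configuration conf = new Configuration();
            if (!isWhirrBasedDeployment) {
                // Non-Whirr case: read the *-site.xml files from the supplied config directory
                conf.addResource(new Path(hadoopConfigDir, "core-site.xml"));
                conf.addResource(new Path(hadoopConfigDir, "hdfs-site.xml"));
                conf.addResource(new Path(hadoopConfigDir, "mapred-site.xml"));
            }
            // Whirr case: the configuration would instead be derived from the spec of the
            // provisioned cluster (omitted in this sketch).
            return conf;
        }
    }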
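HadoopApplicationDeploymentDescriptionType itself is an XMLBeans-generated type, so descriptions like the one the provider consumes are built through the generated Factory and addNew*/set* accessors. A minimal sketch of constructing one follows; the setter names are inferred from the getters used above plus standard XMLBeans conventions, and all concrete values are hypothetical:

    import org.apache.airavata.schemas.gfac.HadoopApplicationDeploymentDescriptionType;

    public class HadoopDescriptorExample {
        public static HadoopApplicationDeploymentDescriptionType buildDescription() {
            // XMLBeans-generated types are instantiated through their Factory
            HadoopApplicationDeploymentDescriptionType desc =
                    HadoopApplicationDeploymentDescriptionType.Factory.newInstance();
            desc.setInputDataDirectory("/tmp/wordcount/input");   // hypothetical local staging dir
            desc.setOutputDataDirectory("/tmp/wordcount/output");

            // Nested complex elements get addNew* methods in XMLBeans codegen
            HadoopApplicationDeploymentDescriptionType.HadoopJobConfiguration jobConf =
                    desc.addNewHadoopJobConfiguration();
            jobConf.setJobName("wordcount");
            jobConf.setJarLocation("/opt/jobs/wordcount.jar");    // hypothetical jar path
            jobConf.setMapperClass("example.WordCountMapper");    // hypothetical job classes
            jobConf.setCombinerClass("example.WordCountReducer");
            jobConf.setReducerClass("example.WordCountReducer");
            jobConf.setOutputKeyClass("org.apache.hadoop.io.Text");
            jobConf.setOutputValueClass("org.apache.hadoop.io.IntWritable");
            jobConf.setInputFormatClass("org.apache.hadoop.mapreduce.lib.input.TextInputFormat");
            jobConf.setOutputFormatClass("org.apache.hadoop.mapreduce.lib.output.TextOutputFormat");
            jobConf.setHdfsInputDirectory("/user/airavata/wordcount/input");
            return desc;
        }
    }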

