Package org.apache.sqoop.job

Examples of org.apache.sqoop.job.PrefixContext


    String extractorName = conf.get(JobConstants.JOB_ETL_EXTRACTOR);
    Extractor extractor = (Extractor) ClassUtils.instantiate(extractorName);

    // Objects that should be passed to the Executor execution
    PrefixContext subContext = null;
    Object configConnection = null;
    Object configJob = null;

    // Propagate connector schema in every case for now
    // TODO: Change to conditional choosing between HIO and Connector schema
    Schema schema = ConfigurationUtils.getConnectorSchema(conf);

    // Executor is in connector space for IMPORT and in framework space for EXPORT
    switch (ConfigurationUtils.getJobType(conf)) {
      case IMPORT:
        subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
        configConnection = ConfigurationUtils.getConfigConnectorConnection(conf);
        configJob = ConfigurationUtils.getConfigConnectorJob(conf);
        break;
      case EXPORT:
        subContext = new PrefixContext(conf, "");
        configConnection = ConfigurationUtils.getConfigFrameworkConnection(conf);
        configJob = ConfigurationUtils.getConfigFrameworkJob(conf);
        break;
      default:
        throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
View Full Code Here


      LOG.info("Skipping running destroyer as non was defined.");
      return;
    }

    // Objects that should be passed to the Destroyer execution
    PrefixContext subContext = new PrefixContext(configuration, JobConstants.PREFIX_CONNECTOR_CONTEXT);
    Object configConnection = ConfigurationUtils.getConfigConnectorConnection(configuration);
    Object configJob = ConfigurationUtils.getConfigConnectorJob(configuration);

    // Propagate connector schema in every case for now
    // TODO: Change to conditional choosing between HIO and Connector schema
View Full Code Here

    Configuration conf = context.getConfiguration();

    String partitionerName = conf.get(JobConstants.JOB_ETL_PARTITIONER);
    Partitioner partitioner = (Partitioner) ClassUtils.instantiate(partitionerName);

    PrefixContext connectorContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
    Object connectorConnection = ConfigurationUtils.getConfigConnectorConnection(conf);
    Object connectorJob = ConfigurationUtils.getConfigConnectorJob(conf);
    Schema schema = ConfigurationUtils.getConnectorSchema(conf);

    long maxPartitions = conf.getLong(JobConstants.JOB_ETL_EXTRACTOR_NUM, 10);
View Full Code Here

          loaderName = conf.get(JobConstants.JOB_ETL_LOADER);
        }
        Loader loader = (Loader) ClassUtils.instantiate(loaderName);

        // Objects that should be passed to the Executor execution
        PrefixContext subContext = null;
        Object configConnection = null;
        Object configJob = null;
        Schema schema = null;

        if (!isTest) {
          // Propagate connector schema in every case for now
          // TODO: Change to conditional choosing between HIO and Connector schema
          schema = ConfigurationUtils.getConnectorSchema(conf);

          switch (ConfigurationUtils.getJobType(conf)) {
            case EXPORT:
              subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
              configConnection = ConfigurationUtils.getConfigConnectorConnection(conf);
              configJob = ConfigurationUtils.getConfigConnectorJob(conf);
              break;
            case IMPORT:
              subContext = new PrefixContext(conf, "");
              configConnection = ConfigurationUtils.getConfigFrameworkConnection(conf);
              configJob = ConfigurationUtils.getConfigFrameworkJob(conf);
              break;
            default:
              throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
View Full Code Here

    Configuration conf = context.getConfiguration();

    String partitionerName = conf.get(JobConstants.JOB_ETL_PARTITIONER);
    Partitioner partitioner = (Partitioner) ClassUtils.instantiate(partitionerName);

    PrefixContext connectorContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
    Object connectorConnection = ConfigurationUtils.getConnectorConnection(conf);
    Object connectorJob = ConfigurationUtils.getConnectorJob(conf);

    long maxPartitions = conf.getLong(JobConstants.JOB_ETL_EXTRACTOR_NUM, 10);
View Full Code Here

        loaderName = conf.get(JobConstants.JOB_ETL_LOADER);
      }
      Loader loader = (Loader) ClassUtils.instantiate(loaderName);

      // Objects that should be passed to the Executor execution
      PrefixContext subContext = null;
      Object configConnection = null;
      Object configJob = null;

      if (!isTest) {
        switch (ConfigurationUtils.getJobType(conf)) {
          case EXPORT:
            subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
            configConnection = ConfigurationUtils.getConnectorConnection(conf);
            configJob = ConfigurationUtils.getConnectorJob(conf);
            break;
          case IMPORT:
            subContext = new PrefixContext(conf, "");
            configConnection = ConfigurationUtils.getFrameworkConnection(conf);
            configJob = ConfigurationUtils.getFrameworkJob(conf);
            break;
          default:
            readerFinished = true;
View Full Code Here

    String extractorName = conf.get(JobConstants.JOB_ETL_EXTRACTOR);
    Extractor extractor = (Extractor) ClassUtils.instantiate(extractorName);

    // Objects that should be passed to the Executor execution
    PrefixContext subContext = null;
    Object configConnection = null;
    Object configJob = null;

    // Executor is in connector space for IMPORT and in framework space for EXPORT
    switch (ConfigurationUtils.getJobType(conf)) {
      case IMPORT:
        subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
        configConnection = ConfigurationUtils.getConnectorConnection(conf);
        configJob = ConfigurationUtils.getConnectorJob(conf);
        break;
      case EXPORT:
        subContext = new PrefixContext(conf, "");
        configConnection = ConfigurationUtils.getFrameworkConnection(conf);
        configJob = ConfigurationUtils.getFrameworkJob(conf);
        break;
      default:
        throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
View Full Code Here

      LOG.info("Skipping running destroyer as non was defined.");
      return;
    }

    // Objects that should be passed to the Destroyer execution
    PrefixContext subContext = new PrefixContext(configuration, JobConstants.PREFIX_CONNECTOR_CONTEXT);
    Object configConnection = ConfigurationUtils.getConnectorConnection(configuration);
    Object configJob = ConfigurationUtils.getConnectorJob(configuration);

    LOG.info("Executing destroyer class " + destroyer.getClass());
    destroyer.destroy(success, subContext, configConnection, configJob);
View Full Code Here

    String extractorName = conf.get(JobConstants.JOB_ETL_EXTRACTOR);
    Extractor extractor = (Extractor) ClassUtils.instantiate(extractorName);

    // Objects that should be passed to the Executor execution
    PrefixContext subContext = null;
    Object configConnection = null;
    Object configJob = null;

    // Executor is in connector space for IMPORT and in framework space for EXPORT
    switch (ConfigurationUtils.getJobType(conf)) {
      case IMPORT:
        subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
        configConnection = ConfigurationUtils.getConnectorConnection(conf);
        configJob = ConfigurationUtils.getConnectorJob(conf);
        break;
      case EXPORT:
        subContext = new PrefixContext(conf, "");
        configConnection = ConfigurationUtils.getFrameworkConnection(conf);
        configJob = ConfigurationUtils.getFrameworkJob(conf);
        break;
      default:
        throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
View Full Code Here

      LOG.info("Skipping running destroyer as non was defined.");
      return;
    }

    // Objects that should be passed to the Destroyer execution
    PrefixContext subContext = new PrefixContext(configuration, JobConstants.PREFIX_CONNECTOR_CONTEXT);
    Object configConnection = ConfigurationUtils.getConnectorConnection(configuration);
    Object configJob = ConfigurationUtils.getConnectorJob(configuration);

    DestroyerContext destroyerContext = new DestroyerContext(subContext, success);
View Full Code Here

TOP

Related Classes of org.apache.sqoop.job.PrefixContext

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.