Package com.cloudera.sqoop

Examples of com.cloudera.sqoop.SqoopOptions
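
The snippets below build SqoopOptions either directly or by letting a tool parse command-line arguments. As a minimal orientation sketch (the connect string, table name, and target dir are placeholder values):

    // Construct options directly, as the create(...) helper further down does.
    SqoopOptions options = new SqoopOptions("jdbc:hsqldb:mem:sqooptest", "employees");
    options.setUsername("sa");
    options.setTargetDir("targetDir");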


    // ... (snippet begins mid-method)
    expectExceptionInCharCode(254);
  }
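
For context, a hedged sketch of the helper invoked above, assuming SqoopOptions.toChar and its nested InvalidOptionsException behave as in Sqoop's option parsing (the octal-escape encoding and the fail message are assumptions):

  // Likely shape of the helper: a character code outside the valid range
  // should make SqoopOptions.toChar throw.
  private void expectExceptionInCharCode(int charCode) {
    try {
      SqoopOptions.toChar("\\0" + Integer.toString(charCode, 8));
      fail("parsed an invalid character code without error");
    } catch (SqoopOptions.InvalidOptionsException ioe) {
      // Expected.
    }
  }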

  public void testDifferentTableNames() throws Exception {
    Configuration conf = new Configuration();
    SqoopOptions options = new SqoopOptions();
    TableDefWriter writer = new TableDefWriter(options, null,
        "inputTable", "outputTable", conf, false);

    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
    writer.setColumnTypes(colTypes);
    // ... (remainder elided)
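
The elided remainder of these TableDefWriter tests typically asserts on the generated statements; a hedged sketch of a continuation, assuming the getCreateTableStmt() and getLoadDataStmt() accessors:

    // Possible continuation: the DDL should name the output table and never
    // mention the input table.
    String createTable = writer.getCreateTableStmt();
    String loadData = writer.getLoadDataStmt();
    assertNotNull(createTable);
    assertNotNull(loadData);
    assertTrue(createTable.indexOf("outputTable") != -1);
    assertEquals(-1, createTable.indexOf("inputTable"));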


    String targetDir = "targetDir";
    String inputTable = "inputTable";
    String outputTable = "outputTable";

    Configuration conf = new Configuration();
    SqoopOptions options = new SqoopOptions();
    // Specify a different target dir from input table name
    options.setTargetDir(targetDir);
    TableDefWriter writer = new TableDefWriter(options, null,
        inputTable, outputTable, conf, false);

    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
    writer.setColumnTypes(colTypes);
    // ... (remainder elided)

    String[] args = {
        "--hive-partition-key", "ds",
        "--hive-partition-value", "20110413",
    };
    Configuration conf = new Configuration();
    SqoopOptions options =
      new ImportTool().parseArguments(args, null, null, false);
    TableDefWriter writer = new TableDefWriter(options,
        null, "inputTable", "outputTable", conf, false);

    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
    // ... (remainder elided)
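
With --hive-partition-key and --hive-partition-value set, the generated Hive DDL should carry a partition clause; a sketch of the check that plausibly follows (the exact clause text is an assumption):

    writer.setColumnTypes(colTypes);
    String createTable = writer.getCreateTableStmt();
    // Assumed shape of the clause emitted for a string partition column.
    assertTrue(createTable.indexOf("PARTITIONED BY (ds STRING)") != -1);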

    String[] args = {
        "--compress",
        "--compression-codec", "lzop",
    };
    Configuration conf = new Configuration();
    SqoopOptions options =
      new ImportTool().parseArguments(args, null, null, false);
    TableDefWriter writer = new TableDefWriter(options,
        null, "inputTable", "outputTable", conf, false);

    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
    // ... (remainder elided)

  public void testUserMapping() throws Exception {
    String[] args = {
        "--map-column-hive", "id=STRING,value=INTEGER",
    };
    Configuration conf = new Configuration();
    SqoopOptions options =
      new ImportTool().parseArguments(args, null, null, false);
    TableDefWriter writer = new TableDefWriter(options,
        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);

    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
    // ... (remainder elided)

  public void testUserMappingFailWhenCantBeApplied() throws Exception {
    String[] args = {
        "--map-column-hive", "id=STRING,value=INTEGER",
    };
    Configuration conf = new Configuration();
    SqoopOptions options =
      new ImportTool().parseArguments(args, null, null, false);
    TableDefWriter writer = new TableDefWriter(options,
        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);

    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
    // ... (remainder elided)
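
Judging by its name, this test expects DDL generation to fail because the --map-column-hive mapping names a column the table does not provide; a hedged sketch of the expected-failure idiom (the concrete exception type is an assumption):

    // Only "id" is registered, so the mapping for "value" cannot be applied.
    colTypes.put("id", java.sql.Types.INTEGER);
    writer.setColumnTypes(colTypes);
    try {
      writer.getCreateTableStmt();
      fail("Expected the unapplicable mapping to be rejected");
    } catch (IllegalArgumentException iae) {
      // Expected.
    }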

   */
  public void importTable(com.cloudera.sqoop.manager.ImportJobContext context)
    throws IOException, ImportException {

    String tableName = context.getTableName();
    SqoopOptions options = context.getOptions();

    LOG.info("Beginning psql fast path import");

    if (options.getFileLayout() != SqoopOptions.FileLayout.TextFile) {
      // TODO(aaron): Support SequenceFile-based load-in
      LOG.warn("File import layout" + options.getFileLayout()
          + " is not supported by");
      LOG.warn("Postgresql direct import; import will proceed as text files.");
    }

    if (!StringUtils.equals(options.getNullStringValue(),
      options.getNullNonStringValue())) {
      throw new ImportException(
        "Detected different values of the --null-string and --null-non-string " +
        "parameters. The PostgreSQL direct manager does not support that. " +
        "Please either use the same value for both or omit the --direct " +
        "parameter.");
    }

    String commandFilename = null;
    String passwordFilename = null;
    Process p = null;
    AsyncSink sink = null;
    AsyncSink errSink = null;
    PerfCounters counters = new PerfCounters();

    try {
      // Get the COPY TABLE command to issue, write this to a file, and pass
      // it in to psql with -f filename.  Then make sure we delete this file
      // in our finally block.
      String copyCmd = getCopyCommand(tableName);
      commandFilename = writeCopyCommand(copyCmd);

      // Arguments to pass to psql on the command line.
      ArrayList<String> args = new ArrayList<String>();

      // Environment to pass to psql.
      List<String> envp = Executor.getCurEnvpStrings();

      // We need to parse the connect string URI to determine the database
      // name and the host and port. If the host is localhost and the port is
      // not specified, we don't want to pass this to psql, because we want to
      // force the use of a UNIX domain socket, not a TCP/IP socket.
      String connectString = options.getConnectString();
      String databaseName = JdbcUrl.getDatabaseName(connectString);
      String hostname = JdbcUrl.getHostName(connectString);
      int port = JdbcUrl.getPort(connectString);

      if (null == databaseName) {
        throw new ImportException("Could not determine database name");
      }

      LOG.info("Performing import of table " + tableName + " from database "
          + databaseName);
      args.add(PSQL_CMD); // requires that this is on the path.
      args.add("--tuples-only");
      args.add("--quiet");

      String username = options.getUsername();
      if (username != null) {
        args.add("--username");
        args.add(username);
        String password = options.getPassword();
        if (null != password) {
          passwordFilename =
            PostgreSQLUtils.writePasswordFile(options.getTempDir(), password);
          // Need to send PGPASSFILE environment variable specifying
          // location of our postgres file.
          envp.add("PGPASSFILE=" + passwordFilename);
        }
      }

      args.add("--host");
      args.add(hostname);

      if (port != -1) {
        args.add("--port");
        args.add(Integer.toString(port));
      }

      if (null != databaseName && databaseName.length() > 0) {
        args.add(databaseName);
      }

      // The COPY command is in a script file.
      args.add("-f");
      args.add(commandFilename);

      // begin the import in an external process.
      LOG.debug("Starting psql with arguments:");
      for (String arg : args) {
        LOG.debug("  " + arg);
      }

      // This writer will be closed by AsyncSink.
      SplittableBufferedWriter w = DirectImportUtils.createHdfsSink(
          options.getConf(), options, context);

      // Actually start the psql dump.
      p = Runtime.getRuntime().exec(args.toArray(new String[0]),
          envp.toArray(new String[0]));

      // ... (remainder elided)
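
The snippet stops right after spawning psql; a hedged sketch of the usual continuation for this pattern, wiring the declared sinks to the child process and checking its exit status (the PostgresqlAsyncSink name is an assumption; LoggingAsyncSink is Sqoop's stock log-draining sink):

      // Copy psql's stdout into the HDFS writer 'w' and its stderr into the
      // log, then block until the child exits.
      sink = new PostgresqlAsyncSink(w, options, counters);  // assumed class
      errSink = new LoggingAsyncSink(LOG);
      counters.startClock();
      sink.processStream(p.getInputStream());
      errSink.processStream(p.getErrorStream());

      int result;
      try {
        result = p.waitFor();
      } catch (InterruptedException ie) {
        throw new IOException("Interrupted waiting for psql", ie);
      }
      if (0 != result) {
        throw new IOException("psql terminated with status " + result);
      }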

  private void create(
      String name,
      String type,
      String schema,
      CreateIt createIt) {
    SqoopOptions options = new SqoopOptions(CONNECT_STRING, name);
    options.setUsername(DATABASE_USER);

    ConnManager manager = null;
    Statement st = null;

    try {
      // ... (remainder elided)

  public static final Log LOG = LogFactory.getLog(
      DefaultManagerFactory.class.getName());

  public ConnManager accept(JobData data) {
    SqoopOptions options = data.getSqoopOptions();

    String scheme = extractScheme(options);
    if (null == scheme) {
      // We don't know if this is a mysql://, hsql://, etc.
      // Can't do anything with this.
      LOG.warn("Null scheme associated with connect string.");
      return null;
    }

    LOG.debug("Trying with scheme: " + scheme);

    if (scheme.equals("jdbc:mysql:")) {
      if (options.isDirect()) {
        return new DirectMySQLManager(options);
      } else {
        return new MySQLManager(options);
      }
    } else if (scheme.equals("jdbc:postgresql:")) {
      if (options.isDirect()) {
        return new DirectPostgresqlManager(options);
      } else {
        return new PostgresqlManager(options);
      }
    } else if (scheme.startsWith("jdbc:hsqldb:")) {
      // ... (remainder elided)
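
A hedged usage sketch for the factory: callers wrap the options in a JobData and must tolerate a null return when no scheme matches (the JobData constructor signature is an assumption):

    SqoopOptions options = new SqoopOptions();
    options.setConnectString("jdbc:postgresql://db.example.com/sales");
    ConnManager manager = new DefaultManagerFactory().accept(new JobData(options, null));
    if (null == manager) {
      LOG.warn("No ConnManager recognized the connect string scheme.");
    }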

      // ... (snippet begins mid-method)
      return 1;
    }

    // Create a SqoopOptions and Configuration based on the current one,
    // but deep-copied. This will be populated within the job.
    SqoopOptions jobOptions = new SqoopOptions();
    jobOptions.setConf(new Configuration(options.getConf()));

    // Get the arguments to feed to the child tool.
    String [] childArgs = Arrays.copyOfRange(extraArguments, toolArgPos + 1,
        extraArguments.length);
    // ... (remainder elided)
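
For clarity, a tiny worked example of the copyOfRange slice, assuming toolArgPos indexes the child tool's name in the raw argument array (the values are illustrative):

    String[] extraArguments = { "--", "import", "--connect", "jdbc:hsqldb:mem:db" };
    int toolArgPos = 1;  // position of the child tool's name
    String[] childArgs = Arrays.copyOfRange(extraArguments, toolArgPos + 1,
        extraArguments.length);
    // childArgs is now { "--connect", "jdbc:hsqldb:mem:db" }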
