Package org.apache.sqoop.schema

Examples of org.apache.sqoop.schema.Schema


  public static Schema restoreSchemna(JSONObject jsonObject) {
    String name = (String)jsonObject.get(NAME);
    String note = (String)jsonObject.get(NOTE);
    java.util.Date date = new java.util.Date((Long)jsonObject.get(CREATION_DATE));

    Schema schema = new Schema(name)
      .setNote(note)
      .setCreationDate(date);

    JSONArray columnsArray = (JSONArray)jsonObject.get(COLUMNS);
    for (Object obj : columnsArray) {
      schema.addColumn(restoreColumn((JSONObject)obj));
    }

    return schema;
  }
View Full Code Here
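
For context, the inverse operation implied by restoreSchemna() could look roughly like the sketch below. It assumes the same JSON keys (NAME, NOTE, CREATION_DATE, COLUMNS), the json-simple JSONObject/JSONArray types used above, straightforward Schema getters, and a hypothetical extractColumn() helper for serializing a single Column; it is an illustration, not the project's actual serializer.

  // Sketch only: write a Schema into the JSON layout read by restoreSchemna().
  // Getter names and the extractColumn() helper are assumptions.
  @SuppressWarnings("unchecked")
  public static JSONObject extractSchema(Schema schema) {
    JSONObject object = new JSONObject();
    object.put(NAME, schema.getName());
    object.put(NOTE, schema.getNote());
    object.put(CREATION_DATE, schema.getCreationDate().getTime());

    JSONArray columnsArray = new JSONArray();
    for (Column column : schema.getColumns()) {
      columnsArray.add(extractColumn(column)); // hypothetical per-column serializer
    }
    object.put(COLUMNS, columnsArray);

    return object;
  }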


    Object configConnection = null;
    Object configJob = null;

    // Propagate connector schema in every case for now
    // TODO: Change to conditionally choose between the HIO and Connector schema
    Schema schema = ConfigurationUtils.getConnectorSchema(conf);

    // Executor is in connector space for IMPORT and in framework space for EXPORT
    switch (ConfigurationUtils.getJobType(conf)) {
      case IMPORT:
        subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
View Full Code Here

    Object configConnection = ConfigurationUtils.getConfigConnectorConnection(configuration);
    Object configJob = ConfigurationUtils.getConfigConnectorJob(configuration);

    // Propagate connector schema in every case for now
    // TODO: Change to conditionally choose between the HIO and Connector schema
    Schema schema = ConfigurationUtils.getConnectorSchema(configuration);

    DestroyerContext destroyerContext = new DestroyerContext(subContext, success, schema);

    LOG.info("Executing destroyer class " + destroyer.getClass());
    destroyer.destroy(destroyerContext, configConnection, configJob);
View Full Code Here
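
The destroy() call above receives the destroyer context plus the connector's connection and job configuration objects. A no-op Destroyer matching that call might be sketched as follows; the generic type parameters and the concrete configuration classes are assumptions.

  // Sketch only: a connector-side Destroyer that performs no cleanup; the
  // generic signature and the configuration classes are assumed for illustration.
  public class NoOpDestroyer extends Destroyer<ConnectionConfiguration, ExportJobConfiguration> {
    @Override
    public void destroy(DestroyerContext context, ConnectionConfiguration connection,
                        ExportJobConfiguration job) {
      // A real connector would release per-job resources here
      // (staging tables, temporary files, pooled connections, ...).
    }
  }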

    Partitioner partitioner = (Partitioner) ClassUtils.instantiate(partitionerName);

    PrefixContext connectorContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
    Object connectorConnection = ConfigurationUtils.getConfigConnectorConnection(conf);
    Object connectorJob = ConfigurationUtils.getConfigConnectorJob(conf);
    Schema schema = ConfigurationUtils.getConnectorSchema(conf);

    long maxPartitions = conf.getLong(JobConstants.JOB_ETL_EXTRACTOR_NUM, 10);
    PartitionerContext partitionerContext = new PartitionerContext(connectorContext, maxPartitions, schema);

    List<Partition> partitions = partitioner.getPartitions(partitionerContext, connectorConnection, connectorJob);
View Full Code Here
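
The PartitionerContext built above carries the prefixed connector context, the maximum number of partitions, and the schema. A trivial Partitioner matching the getPartitions() call could be sketched like this; the generic type parameters, the configuration classes, and the DummyPartition class are assumptions.

  // Sketch only: always return a single split; a real partitioner would split
  // the input into at most the maximum passed in via the PartitionerContext
  // constructor above.
  public class SingleSplitPartitioner
      extends Partitioner<ConnectionConfiguration, ImportJobConfiguration> {
    @Override
    public List<Partition> getPartitions(PartitionerContext context,
        ConnectionConfiguration connection, ImportJobConfiguration job) {
      List<Partition> partitions = new LinkedList<Partition>();
      partitions.add(new DummyPartition()); // hypothetical Partition subclass
      return partitions;
    }
  }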

        // Objects that should be passed to the Executor execution
        PrefixContext subContext = null;
        Object configConnection = null;
        Object configJob = null;
        Schema schema = null;

        if (!isTest) {
          // Propagate connector schema in every case for now
          // TODO: Change to conditionally choose between the HIO and Connector schema
          schema = ConfigurationUtils.getConnectorSchema(conf);
View Full Code Here

   *
   * @param name Name that should be used for the generated schema.
   * @return Schema with one fixed point, one floating point, and one text column.
   */
  public Schema getSchema(String name) {
    return new Schema(name)
      .addColumn(new FixedPoint("ICOL"))
      .addColumn(new FloatingPoint("DCOL"))
      .addColumn(new Text("VCOL"))
    ;
  }
View Full Code Here
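
A small usage sketch of the fluent builder shown above; the column classes are assumed to live in org.apache.sqoop.schema.type, and assertEquals() comes from the test framework used elsewhere on this page.

    // Sketch only: build the same three-column schema inline and compare it
    // with the helper above.
    Schema expected = new Schema("EMPLOYEES")
      .addColumn(new FixedPoint("ICOL"))     // integer-valued column
      .addColumn(new FloatingPoint("DCOL"))  // floating-point column
      .addColumn(new Text("VCOL"));          // character/text column
    assertEquals(expected, getSchema("EMPLOYEES"));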

    InitializerContext initializerContext = new InitializerContext(context);

    @SuppressWarnings("rawtypes")
    Initializer initializer = new GenericJdbcImportInitializer();
    initializer.initialize(initializerContext, connConf, jobConf);
    Schema schema = initializer.getSchema(initializerContext, connConf, jobConf);
    assertEquals(getSchema(tableName), schema);
  }
View Full Code Here

    InitializerContext initializerContext = new InitializerContext(context);

    @SuppressWarnings("rawtypes")
    Initializer initializer = new GenericJdbcImportInitializer();
    initializer.initialize(initializerContext, connConf, jobConf);
    Schema schema = initializer.getSchema(initializerContext, connConf, jobConf);
    assertEquals(getSchema("Query"), schema);
  }
View Full Code Here

    String schemaName = importJobConfiguration.table.tableName;
    if(schemaName == null) {
      schemaName = "Query";
    }

    Schema schema = new Schema(schemaName);

    ResultSet rs = null;
    ResultSetMetaData rsmt = null;
    try {
      // Execute the data query with an always-false condition ("1 = 0") so
      // that only result set metadata is fetched, never actual rows.
      rs = executor.executeQuery(
        context.getString(GenericJdbcConnectorConstants.CONNECTOR_JDBC_DATA_SQL)
          .replace(GenericJdbcConnectorConstants.SQL_CONDITIONS_TOKEN, "1 = 0")
      );

      rsmt = rs.getMetaData();
      for (int i = 1 ; i <= rsmt.getColumnCount(); i++) {
        Column column = SqlTypesUtils.sqlTypeToAbstractType(rsmt.getColumnType(i));

        // Prefer the column name; fall back to the label, then to a synthetic name.
        String columnName = rsmt.getColumnName(i);
        if (columnName == null || columnName.equals("")) {
          columnName = rsmt.getColumnLabel(i);
          if (null == columnName) {
            columnName = "Column " + i;
          }
        }

        column.setName(columnName);
        schema.addColumn(column);
      }

      return schema;
    } catch (SQLException e) {
      throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0016, e);
View Full Code Here
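
SqlTypesUtils.sqlTypeToAbstractType() itself is not shown on this page. As an illustration only, a mapping from java.sql.Types codes to Schema column types could be sketched as follows; the no-argument column constructors are assumed, and the real implementation may cover more types.

  // Illustrative sketch, not the actual SqlTypesUtils implementation:
  // map a java.sql.Types code to an unnamed Schema column.
  public static Column sqlTypeToAbstractType(int sqlType) {
    switch (sqlType) {
      case Types.SMALLINT:
      case Types.INTEGER:
      case Types.BIGINT:
        return new FixedPoint();
      case Types.REAL:
      case Types.FLOAT:
      case Types.DOUBLE:
        return new FloatingPoint();
      default:
        // Fall back to a text column for anything unmapped (assumption).
        return new Text();
    }
  }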

    ConfigurationUtils.setHioSchema(job, null);
    assertNull(ConfigurationUtils.getHioSchema(jobConf));
  }

  private Schema getSchema(String name) {
    return new Schema(name).addColumn(new Text("c1"));
  }
View Full Code Here
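
In the same style as the test above, a round trip through the connector-schema accessors might be sketched as follows; a setConnectorSchema(Job, Schema) counterpart to the getConnectorSchema() calls shown earlier is assumed.

    // Sketch only: store and re-read the connector schema through
    // ConfigurationUtils; setConnectorSchema(Job, Schema) is an assumption.
    ConfigurationUtils.setConnectorSchema(job, getSchema("c1"));
    assertEquals(getSchema("c1"), ConfigurationUtils.getConnectorSchema(jobConf));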
