Package org.apache.openjpa.jdbc.schema

Examples of org.apache.openjpa.jdbc.schema.Table


        }

        SchemaGroup group = new SchemaGroup();
        Schema schema = group.addSchema(schemaName);

        Table table = schema.addTable(tableName);
        _pkColumn = addPrimaryKeyColumn(table);
        PrimaryKey pk = table.addPrimaryKey();
        pk.addColumn(_pkColumn);

        DBDictionary dict = _conf.getDBDictionaryInstance();
        _seqColumn = table.addColumn(dict.getValidColumnName
            (_seqColumnName, table));
        _seqColumn.setType(dict.getPreferredType(Types.BIGINT));
        _seqColumn.setJavaType(JavaTypes.LONG);
       
        if (_uniqueColumnNames != null) {
            DBIdentifier uniqueName = _uniqueConstraintName;
            if (DBIdentifier.isEmpty(uniqueName)) {
                uniqueName = dict.getValidUniqueName(DBIdentifier.newConstraint("UNQ"), table);
            }
            Unique u = table.addUnique(uniqueName);
            for (DBIdentifier columnName : _uniqueColumnNames) {
                if (!table.containsColumn(columnName, _conf.getDBDictionaryInstance()))
                    throw new UserException(_loc.get("unique-missing-column",
                        columnName, table.getIdentifier(),
                        table.getColumnNames()));
                Column col = table.getColumn(columnName);
                u.addColumn(col);
            }
        }
       
    }
View Full Code Here
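
The snippet above builds an in-memory definition of a sequence table: a SchemaGroup holding one Schema, a Table with a primary key column, a BIGINT sequence column, and an optional unique constraint. Below is a minimal, self-contained sketch of the same calls with the configuration and dictionary lookups stripped out; it assumes the DBIdentifier-based overloads used above, and the class, schema, table, and column names are invented for illustration.

import java.sql.Types;

import org.apache.openjpa.jdbc.identifier.DBIdentifier;
import org.apache.openjpa.jdbc.schema.Column;
import org.apache.openjpa.jdbc.schema.PrimaryKey;
import org.apache.openjpa.jdbc.schema.Schema;
import org.apache.openjpa.jdbc.schema.SchemaGroup;
import org.apache.openjpa.jdbc.schema.Table;
import org.apache.openjpa.jdbc.schema.Unique;
import org.apache.openjpa.meta.JavaTypes;

public class SequenceTableSketch {

    // Build a stand-alone table definition; nothing here touches a database.
    public static Table buildSequenceTable() {
        SchemaGroup group = new SchemaGroup();
        Schema schema = group.addSchema(DBIdentifier.newSchema("APP"));
        Table table = schema.addTable(DBIdentifier.newTable("SEQ_TABLE"));

        // primary key column
        Column pkCol = table.addColumn(DBIdentifier.newColumn("ID"));
        pkCol.setType(Types.BIGINT);
        pkCol.setJavaType(JavaTypes.LONG);
        PrimaryKey pk = table.addPrimaryKey();
        pk.addColumn(pkCol);

        // sequence value column
        Column seqCol = table.addColumn(DBIdentifier.newColumn("SEQUENCE_VALUE"));
        seqCol.setType(Types.BIGINT);
        seqCol.setJavaType(JavaTypes.LONG);

        // single-column unique constraint
        Unique u = table.addUnique(DBIdentifier.newConstraint("UNQ_SEQ"));
        u.addColumn(seqCol);
        return table;
    }
}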


        // class will leave dangling relations; we might be able
        // to issue bulk deletes separately for the joins (possibly
        // using a temporary table to select the primary keys for
        // all the related tables and then issuing a delete against those
        // keys), but that logic is not currently implemented
        Table table = getTable(mapping.getFieldMappings(), null);
        if (table == INVALID)
            return false;

        if (subclasses) {
            // if we are including subclasses, we also need to gather
View Full Code Here

        DBDictionary dict = conf.getDBDictionaryInstance();
        ClassMapping mapping = (ClassMapping) conf.getMetaDataRepositoryInstance()
            .getMetaData(EntityVeryLongNames.class, getClass().getClassLoader(), true);
        Table table = mapping.getTable();
        assertTrue(table.getName().length() > 0);
        assertTrue(table.getName().length() <= dict.maxTableNameLength);
        validateColumnNames(table, dict);

        mapping = (ClassMapping) conf.getMetaDataRepositoryInstance()
            .getMetaData(EntityReservedWords.class, getClass().getClassLoader(), true);
        table = mapping.getTable();
        assertTrue(table.getName().length() > 0);
        assertTrue(table.getName().length() <= dict.maxTableNameLength);
        validateColumnNames(table, dict);
    }
View Full Code Here
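
The test above checks table names against dict.maxTableNameLength and delegates column checks to a validateColumnNames helper that is not part of the excerpt. The sketch below is only a guess at the kind of check such a helper performs, built on Table.getColumns() and the public maxColumnNameLength field of DBDictionary; it is not the actual OpenJPA test code, and the class name is invented.

import org.apache.openjpa.jdbc.schema.Column;
import org.apache.openjpa.jdbc.schema.Table;
import org.apache.openjpa.jdbc.sql.DBDictionary;

public class NameLengthCheckSketch {

    // Verify every column name is non-empty and fits the dictionary's limit.
    public static void validateColumnNames(Table table, DBDictionary dict) {
        for (Column col : table.getColumns()) {
            String name = col.getName();
            if (name == null || name.length() == 0)
                throw new IllegalStateException(
                    "Empty column name in table " + table.getName());
            if (name.length() > dict.maxColumnNameLength)
                throw new IllegalStateException(
                    "Column name exceeds " + dict.maxColumnNameLength
                        + " characters: " + name);
        }
    }
}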

        return getTableIdentifier(fm, schema).getName();
    }

    public DBIdentifier getTableIdentifier(FieldMapping fm, Schema schema) {
        DBIdentifier sName = DBIdentifier.newTable(fm.getName());
        Table table = fm.getDefiningMapping().getTable();
        if (table != null) {
            DBIdentifier tableName = DBIdentifier.truncate(table.getIdentifier(), 5);
            sName = DBIdentifier.append(tableName, fm.getName());
        }
        if (!_defMissing)
            sName = dict.getValidTableName(sName, schema);
        return sName;
View Full Code Here
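
Here the default identifier for a field's table is derived by truncating the owning table's identifier and appending the field name. A small illustrative sketch of that derivation, detached from any FieldMapping and using invented class and table names, could be:

import org.apache.openjpa.jdbc.identifier.DBIdentifier;

public class DefaultNameSketch {

    // Mirror the derivation above: shorten the owning table's identifier,
    // then append the field name to form the default identifier.
    public static DBIdentifier deriveName(DBIdentifier owningTable, String fieldName) {
        DBIdentifier prefix = DBIdentifier.truncate(owningTable, 5);
        return DBIdentifier.append(prefix, fieldName);
    }

    public static void main(String[] args) {
        DBIdentifier owner = DBIdentifier.newTable("CUSTOMER_ORDER");
        System.out.println(deriveName(owner, "items"));
    }
}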

    /**
     * Return the named table for the given class.
     */
    public Table getTable(final ClassMapping cls, DBIdentifier tableName,
        boolean adapt) {
        Table t = createTable(cls, new TableDefaults() {
            public String get(Schema schema) {
                // delay this so that we don't do schema reflection for unique
                // table name unless necessary
                return cls.getMappingRepository().getMappingDefaults().
                    getTableName(cls, schema);
            }
            public DBIdentifier getIdentifier(Schema schema) {
                return cls.getMappingRepository().getMappingDefaults().
                    getTableIdentifier(cls, schema);
            }
        }, _schemaName, tableName, adapt);
        t.setComment(cls.getTypeAlias() == null
            ? cls.getDescribedType().getName()
            : cls.getTypeAlias());
        return t;
    }
View Full Code Here

        for (DBIdentifier tableName : _uniques.keySet()) {
            List<Unique> uniqueConstraints = _uniques.get(tableName);
            for (Unique template : uniqueConstraints) {
                Column[] templateColumns = template.getColumns();
                Column[] uniqueColumns = new Column[templateColumns.length];
                Table table = getTable((ClassMapping) cm, tableName, adapt);
                for (int i = 0; i < uniqueColumns.length; i++) {
                    DBIdentifier columnName = templateColumns[i].getIdentifier();
                    if (!table.containsColumn(columnName)) {
                        throw new UserException(_loc.get(
                            "unique-missing-column",
                            new Object[]{ cm, columnName, tableName,
                                Arrays.toString(table.getColumnNames()) }));
                    }
                    Column uniqueColumn = table.getColumn(columnName);
                    uniqueColumns[i] = uniqueColumn;
                }
                Unique unique = createUnique(cm, "unique", template,
                    uniqueColumns, adapt);
                if (unique != null)
View Full Code Here

     * Return the columns set for this discriminator, based on the given
     * templates.
     */
    public Column[] getColumns(Discriminator discrim, Column[] templates,
        boolean adapt) {
        Table table = discrim.getClassMapping().getTable();
        discrim.getMappingRepository().getMappingDefaults()
            .populateColumns(discrim, table, templates);
        return createColumns(discrim, null, templates, table, adapt);
    }
View Full Code Here

    }

    private Column getCol(String name) {
        ClassMapping mapping = getMapping(BlobColumnEntity.class);

        Table t = mapping.getTable();
        Column col = t.getColumn(DBIdentifier.newIdentifier(name, DBIdentifierType.COLUMN, true));
        assertNotNull(col);
        return col;
    }
View Full Code Here
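
In getCol, the third argument to DBIdentifier.newIdentifier asks OpenJPA to treat the name as a delimited (quoted) identifier, so mixed-case names, names with spaces, or reserved words are looked up as written. A small illustrative helper built from the same calls (the class name is invented) might be:

import org.apache.openjpa.jdbc.identifier.DBIdentifier;
import org.apache.openjpa.jdbc.identifier.DBIdentifierType;
import org.apache.openjpa.jdbc.schema.Column;
import org.apache.openjpa.jdbc.schema.Table;

public class DelimitedColumnLookupSketch {

    // Look up a column whose raw name may need delimiting (quoting).
    public static Column findColumn(Table table, String rawName) {
        DBIdentifier id = DBIdentifier.newIdentifier(
            rawName, DBIdentifierType.COLUMN, true);
        return table.getColumn(id);
    }
}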

     */
    public Column[] getColumns(Version version, Column[] templates,
        boolean adapt) {
        if (spansMultipleTables(templates))
            return getMultiTableColumns(version, templates, adapt);
        Table table = getSingleTable(version, templates);
        version.getMappingRepository().getMappingDefaults()
            .populateColumns(version, table, templates);
        return createColumns(version, null, templates, table, adapt);
    }
View Full Code Here

     * Return the columns set for this version when the columns are spread
     * across multiple tables.
     */
    public Column[] getMultiTableColumns(Version vers, Column[] templates,
            boolean adapt) {
        Table primaryTable = vers.getClassMapping().getTable();
        List<DBIdentifier> secondaryTableNames = Arrays.asList(
            vers.getClassMapping().getMappingInfo().getSecondaryTableIdentifiers());
        Map<Table, List<Column>> assign = new LinkedHashMap<Table, List<Column>>();
        for (Column col : templates) {
            DBIdentifier tableName = col.getTableIdentifier();
            Table table;
            if (DBIdentifier.isEmpty(tableName)
                || tableName.equals(primaryTable.getIdentifier())) {
                table = primaryTable;
            } else if (secondaryTableNames.contains(tableName)) {
                table = primaryTable.getSchema().getTable(tableName);
            } else {
                throw new UserException(_loc.get("bad-version-column-table",
                    col.getIdentifier().toString(), tableName));
            }
            if (!assign.containsKey(table))
                assign.put(table, new ArrayList<Column>());
            assign.get(table).add(col);
        }
        MappingDefaults def = vers.getMappingRepository().getMappingDefaults();
        List<Column> result = new ArrayList<Column>();

        Set<Map.Entry<Table,List<Column>>> assignSet = assign.entrySet();
        for (Map.Entry<Table,List<Column>> assignEntry : assignSet) {
            Table table = assignEntry.getKey();
            List<Column> cols = assignEntry.getValue();
            Column[] partTemplates = cols.toArray(new Column[cols.size()]);
            def.populateColumns(vers, table, partTemplates);
            result.addAll(Arrays.asList(createColumns(vers, null,
                partTemplates, table, adapt)));
View Full Code Here
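
The loop above assigns each template column either to the primary table or to one of the declared secondary tables before populating and creating the columns per table. The table resolution it depends on can be sketched in isolation as follows (illustrative only; it reuses the same calls as the snippet, the class name is invented, and no error is raised for unknown names):

import org.apache.openjpa.jdbc.identifier.DBIdentifier;
import org.apache.openjpa.jdbc.schema.Table;

public class VersionColumnTableSketch {

    // Resolve the table a version column belongs to: the primary table when
    // no table is named, otherwise a lookup in the primary table's schema.
    public static Table resolveTable(Table primaryTable, DBIdentifier tableName) {
        if (DBIdentifier.isEmpty(tableName)
            || tableName.equals(primaryTable.getIdentifier()))
            return primaryTable;
        return primaryTable.getSchema().getTable(tableName);
    }
}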
