Package com.facebook.presto.spi

Examples of com.facebook.presto.spi.ColumnHandle
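ColumnHandle is the handle a connector gives the Presto engine to identify a single column of a table: the engine treats it as an opaque token and passes it back to the connector for partition pruning, split generation, reads, and writes, as the excerpts below show. The sketch that follows is illustrative only, assuming the plain marker-interface form of ColumnHandle from this SPI version; ExampleColumnHandle and its fields are hypothetical and not part of the Presto code base. Real implementations appearing in the excerpts include NativeColumnHandle, HiveColumnHandle, and TpchColumnHandle.

import com.facebook.presto.spi.ColumnHandle;

// Hypothetical connector-specific handle; not part of the Presto code base.
// A connector stores whatever state it needs to locate the column again later;
// the engine never inspects the handle, it only hands it back to the connector.
public class ExampleColumnHandle
        implements ColumnHandle
{
    private final String columnName;
    private final int ordinalPosition;

    public ExampleColumnHandle(String columnName, int ordinalPosition)
    {
        this.columnName = columnName;
        this.ordinalPosition = ordinalPosition;
    }

    public String getColumnName()
    {
        return columnName;
    }

    public int getOrdinalPosition()
    {
        return ordinalPosition;
    }
}

The engine obtains such handles through ConnectorMetadata, e.g. metadata.getColumnHandle(tableHandle, "orderkey"), and keys maps of partition values and plan-symbol assignments by them, as in the snippets below.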


                    throws Exception
            {
                StorageManagerDao dao = handle.attach(StorageManagerDao.class);

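                    // each entry pairs a column handle with the file that holds that column's data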
                for (Map.Entry<ColumnHandle, File> entry : columnFileHandle.getFiles().entrySet()) {
                    ColumnHandle columnHandle = entry.getKey();
                    File file = entry.getValue();

                    checkState(columnHandle instanceof NativeColumnHandle, "Can only import in a native column");
                    long columnId = ((NativeColumnHandle) columnHandle).getColumnId();
                    String filename = file.getName();
View Full Code Here


    private boolean shouldPrunePartition(Session session, Partition partition, Expression predicate, Map<ColumnHandle, Symbol> columnToSymbol)
    {
        // translate assignments from column->value to symbol->value
        ImmutableMap.Builder<Symbol, Object> assignments = ImmutableMap.builder();
        for (Map.Entry<ColumnHandle, Object> entry : partition.getKeys().entrySet()) {
            ColumnHandle columnHandle = entry.getKey();
            if (columnToSymbol.containsKey(columnHandle)) {
                Symbol symbol = columnToSymbol.get(columnHandle);
                assignments.put(symbol, entry.getValue());
            }
        }
View Full Code Here

    {
        String partitionName = tablePartition.getPartitionName();

        ImmutableMap.Builder<ColumnHandle, Object> builder = ImmutableMap.builder();
        for (PartitionKey partitionKey : allPartitionKeys.get(partitionName)) {
            ColumnHandle columnHandle = columnHandles.get(partitionKey.getName());
            checkArgument(columnHandle != null, "Invalid partition key for column %s in partition %s", partitionKey.getName(), tablePartition.getPartitionName());

            String value = partitionKey.getValue();
            switch (partitionKey.getType()) {
                case BOOLEAN:
View Full Code Here

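            // allocate a plan symbol for each source table column and pair it with the target table's column handle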
            for (ColumnMetadata column : sourceTableMetadata.getColumns()) {
                Field field = Field.newQualified(sourceTable.asQualifiedName(), Optional.of(column.getName()), Type.fromRaw(column.getType()));
                Symbol symbol = symbolAllocator.newSymbol(field);

                inputColumnsBuilder.put(symbol, sourceTableColumns.get(column.getName()));
                ColumnHandle targetColumnHandle = targetTableColumns.get(column.getName());
                fields.add(field);
                columnHandleBuilder.add(targetColumnHandle);
                outputSymbolsBuilder.add(symbol);
            }

            ImmutableList<Symbol> outputSymbols = outputSymbolsBuilder.build();
            plan = new RelationPlan(new TableScanNode(idAllocator.getNextId(), sourceTableHandle, outputSymbols, inputColumnsBuilder.build(), TRUE_LITERAL, TRUE_LITERAL), new TupleDescriptor(fields.build()), outputSymbols);

            targetColumnHandles = columnHandleBuilder.build();
        }
        else {
            RelationPlanner planner = new RelationPlanner(analysis, symbolAllocator, idAllocator, metadata, session);
            plan = planner.process(analysis.getQuery(), null);

            // TODO: create table and periodic import in pre-execution step, not here

            // Create the destination table
            ImmutableList.Builder<ColumnMetadata> columns = ImmutableList.builder();
            for (int i = 0; i < plan.getDescriptor().getFields().size(); i++) {
                Field field = plan.getDescriptor().getFields().get(i);
                String name = field.getName().or("_field" + i);
                ColumnMetadata columnMetadata = new ColumnMetadata(name, field.getType().getColumnType(), i, false);
                columns.add(columnMetadata);
            }

            // TODO: first argument should actually be connectorId
            TableMetadata tableMetadata = new TableMetadata(destination.getCatalogName(), new ConnectorTableMetadata(destination.asSchemaTableName(), columns.build()));
            targetTable = metadata.createTable(destination.getCatalogName(), tableMetadata);

            // get the column handles for the destination table
            Map<String, ColumnHandle> columnHandleIndex = metadata.getColumnHandles(targetTable);
            ImmutableList.Builder<ColumnHandle> columnHandleBuilder = ImmutableList.builder();
            for (ColumnMetadata column : tableMetadata.getColumns()) {
                columnHandleBuilder.add(columnHandleIndex.get(column.getName()));
            }
            targetColumnHandles = columnHandleBuilder.build();

            // find source table (TODO: do this in analyzer)
            QueryBody queryBody = analysis.getQuery().getQueryBody();
            checkState(queryBody instanceof QuerySpecification, "Query is not a simple select statement");
            List<Relation> relations = ((QuerySpecification) queryBody).getFrom();
            checkState(relations.size() == 1, "Query has more than one source table");
            Relation relation = Iterables.getOnlyElement(relations);
            checkState(relation instanceof Table, "FROM clause is not a simple table name");
            QualifiedTableName sourceTable = MetadataUtil.createQualifiedTableName(session, ((Table) relation).getName());

            // create source table and optional import information
            storageManager.insertTableSource(((NativeTableHandle) targetTable), sourceTable);

            // if a refresh is present, create a periodic import for this table
            if (analysis.getRefreshInterval().isPresent()) {
                PeriodicImportJob job = PeriodicImportJob.createJob(sourceTable, destination, analysis.getRefreshInterval().get());
                periodicImportManager.insertJob(job);
            }
        }

        // compute input symbol <-> column mappings
        ImmutableMap.Builder<Symbol, ColumnHandle> mappings = ImmutableMap.builder();

        for (int i = 0; i < targetColumnHandles.size(); i++) {
            ColumnHandle column = targetColumnHandles.get(i);
            Symbol symbol = plan.getSymbol(i);
            mappings.put(symbol, column);
        }

        // create writer node
View Full Code Here

    protected BlockIterable getBlockIterable(String tableName, String columnName, BlocksFileEncoding columnEncoding)
    {
        ConnectorMetadata metadata = new TpchMetadata();
        TableHandle tableHandle = metadata.getTableHandle(new SchemaTableName(TPCH_SCHEMA_NAME, tableName));
        ColumnHandle columnHandle = metadata.getColumnHandle(tableHandle, columnName);
        checkArgument(columnHandle != null, "Table %s does not have a column %s", tableName, columnName);
        return getTpchBlocksProvider().getBlocks((TpchTableHandle) tableHandle, (TpchColumnHandle) columnHandle, 0, 1, columnEncoding);
    }
View Full Code Here

        assertEquals(((NativeTableHandle) tableHandle).getTableId(), 1);

        ConnectorTableMetadata table = metadata.getTableMetadata(tableHandle);
        assertTableEqual(table, getOrdersTable());

        ColumnHandle columnHandle = metadata.getColumnHandle(tableHandle, "orderkey");
        assertInstanceOf(columnHandle, NativeColumnHandle.class);
        assertEquals(((NativeColumnHandle) columnHandle).getColumnId(), 1);
    }
View Full Code Here

    }

    private StageExecutionPlan createTableScanPlan(String planId, MetadataManager metadata, int splitCount)
    {
        TableHandle tableHandle = metadata.getTableHandle(new QualifiedTableName("default", "default", DualMetadata.NAME)).get();
        ColumnHandle columnHandle = metadata.getColumnHandle(tableHandle, DualMetadata.COLUMN_NAME).get();
        Symbol symbol = new Symbol(DualMetadata.COLUMN_NAME);

        // table scan with splitCount splits
        Split split = new DualSplit(HostAddress.fromString("127.0.0.1"));
        PlanNodeId tableScanNodeId = new PlanNodeId(planId);
View Full Code Here

            throws Exception
    {
        TableHandle tableHandle = getTableHandle(tableOfflinePartition);
        assertNotNull(tableHandle);

        ColumnHandle dsColumn = metadata.getColumnHandle(tableHandle, "ds");
        assertNotNull(dsColumn);

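        // restrict the ds partition column to a single value and ask the split manager for the matching partitions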
        TupleDomain tupleDomain = TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>of(dsColumn, Domain.singleValue("2012-12-30")));
        PartitionResult partitionResult = splitManager.getPartitions(tableHandle, tupleDomain);
        for (Partition partition : partitionResult.getPartitions()) {
View Full Code Here

            throws Exception
    {
        TableHandle tableHandle = getTableHandle(tableOfflinePartition);
        assertNotNull(tableHandle);

        ColumnHandle dsColumn = metadata.getColumnHandle(tableHandle, "ds");
        assertNotNull(dsColumn);

        List<Partition> partitions = splitManager.getPartitions(tableHandle, ImmutableMap.<ColumnHandle, Object>of(dsColumn, "2012-12-30"));
        for (Partition partition : partitions) {
            if ("2012-12-30".equals(partition.getKeys().get(dsColumn))) {
View Full Code Here

                    }

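                    // parse the Hive partition name into its key/value pairs, then resolve each key to its column handle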
                    LinkedHashMap<String, String> keys = Warehouse.makeSpecFromName(partitionId);
                    ImmutableMap.Builder<ColumnHandle, Comparable<?>> builder = ImmutableMap.builder();
                    for (Entry<String, String> entry : keys.entrySet()) {
                        ColumnHandle columnHandle = columnsByName.get(entry.getKey());
                        checkArgument(columnHandle != null, "Invalid partition key %s in partition %s", entry.getKey(), partitionId);
                        checkArgument(columnHandle instanceof HiveColumnHandle, "columnHandle is not an instance of HiveColumnHandle");
                        HiveColumnHandle hiveColumnHandle = (HiveColumnHandle) columnHandle;

                        String value = entry.getValue();
View Full Code Here
