Package org.apache.phoenix.jdbc

Examples of org.apache.phoenix.jdbc.PhoenixConnection
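Every snippet below starts from a plain java.sql.Connection handed out by the Phoenix JDBC driver and unwraps it to a PhoenixConnection to reach Phoenix-specific services such as the metadata cache and the query services. A minimal sketch of that pattern (the quorum host in the URL is a placeholder):

import java.sql.Connection;
import java.sql.DriverManager;

import org.apache.phoenix.jdbc.PhoenixConnection;

public class UnwrapExample {
    public static void main(String[] args) throws Exception {
        // Placeholder URL: point this at your own ZooKeeper quorum.
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        try {
            // Any Connection from the Phoenix driver can be unwrapped to the
            // driver-specific implementation class.
            PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
            System.out.println("tenant id: " + pconn.getTenantId());
        } finally {
            conn.close();
        }
    }
}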


        // Excerpt (the enclosing test method signature is not shown on this page):
        // verifies that ALTER TABLE ... SET IMMUTABLE_ROWS=true is reflected in
        // the client-side metadata cache.
        try {
            conn.setAutoCommit(true);
            conn.createStatement().execute("DELETE FROM " + DATA_TABLE_FULL_NAME);
            conn.createStatement().execute("ALTER TABLE " + DATA_TABLE_FULL_NAME + " SET IMMUTABLE_ROWS=true");
            // Running any query forces the client-side metadata cache to update
            conn.createStatement().executeQuery("SELECT COUNT(*) FROM " + DATA_TABLE_FULL_NAME).next();
            PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
            assertTrue(pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), DATA_TABLE_FULL_NAME)).isImmutableRows());
        } finally {
            conn.close();
        }
    }


 
    private static void assertActiveIndex(Connection conn, String schemaName, String tableName) throws SQLException {
        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
        conn.createStatement().executeQuery("SELECT count(*) FROM " + fullTableName).next(); // client side cache will update
        PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
        pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), fullTableName)).getIndexMaintainers(ptr);
        assertTrue(ptr.getLength() > 0);
    }

   
    private static void assertNoActiveIndex(Connection conn, String schemaName, String tableName) throws SQLException {
        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
        String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
        conn.createStatement().executeQuery("SELECT count(*) FROM " + fullTableName).next(); // client side cache will update
        PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
        pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), fullTableName)).getIndexMaintainers(ptr);
        assertTrue(ptr.getLength() == 0);
    }
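
A hedged sketch of how these two helpers might be called from a test in the same class; the schema, table, index, and column names are hypothetical, not taken from the original:

        // Hypothetical usage (placeholder names throughout).
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        assertNoActiveIndex(conn, "MY_SCHEMA", "MY_TABLE");
        conn.createStatement().execute("CREATE INDEX MY_IDX ON MY_SCHEMA.MY_TABLE(V1)");
        assertActiveIndex(conn, "MY_SCHEMA", "MY_TABLE");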

public class DefaultParallelIteratorsRegionSplitterIT extends BaseParallelIteratorsRegionSplitterIT {

    private static List<KeyRange> getSplits(Connection conn, long ts, final Scan scan)
            throws SQLException {
        TableRef tableRef = getTableRef(conn, ts);
        PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
        final List<HRegionLocation> regions = pconn.getQueryServices().getAllTableRegions(tableRef.getTable().getPhysicalName().getBytes());
        PhoenixStatement statement = new PhoenixStatement(pconn);
        StatementContext context = new StatementContext(statement, null, scan, new SequenceManager(statement));
        DefaultParallelIteratorRegionSplitter splitter = new DefaultParallelIteratorRegionSplitter(context, tableRef, HintNode.EMPTY_HINT_NODE) {
            @Override
            protected List<HRegionLocation> getAllRegions() throws SQLException {
                // ... (excerpt ends here; the override presumably returns the
                // pre-fetched regions list captured above)

        // Tail of a separate setup method, shown at the start of the next excerpt:
        conn.commit();
        conn.close();
    }

    protected static TableRef getTableRef(Connection conn, long ts) throws SQLException {
        PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
        return new TableRef(null, pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), STABLE_NAME)), ts, false);
    }
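
A minimal sketch of how getTableRef and getSplits fit together inside this test class, assuming conn and ts come from the surrounding fixture; the assertion is illustrative, not from the original test:

        // Illustrative only: the default splitter yields one KeyRange per region.
        Scan scan = new Scan();
        List<KeyRange> keyRanges = getSplits(conn, ts, scan);
        assertFalse(keyRanges.isEmpty());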

        } // (tail of a preceding test, shown at the top of this excerpt)
    }
    @Test
    public void testCreateOnExistingTable() throws Exception {
        PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
        String tableName = MDTEST_NAME;
        String schemaName = MDTEST_SCHEMA_NAME;
        byte[] cfA = Bytes.toBytes(SchemaUtil.normalizeIdentifier("a"));
        byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
        byte[] cfC = Bytes.toBytes("c");
        byte[][] familyNames = new byte[][] {cfB, cfC};
        byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
        HBaseAdmin admin = pconn.getQueryServices().getAdmin();
        try {
            admin.disableTable(htableName);
            admin.deleteTable(htableName);
            admin.enableTable(htableName); // throws TableNotFoundException once the delete succeeds
        } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
            // ignore: the table did not exist yet
        }
       
        HTableDescriptor descriptor = new HTableDescriptor(htableName);
        for (byte[] familyName : familyNames) {
            HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
            descriptor.addFamily(columnDescriptor);
        }
        admin.createTable(descriptor);
           
        long ts = nextTimestamp();
        Properties props = new Properties();
        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
        PhoenixConnection conn1 = DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class);
        ensureTableCreated(getUrl(), tableName, null, ts);
       
        descriptor = admin.getTableDescriptor(htableName);
        assertEquals(3, descriptor.getColumnFamilies().length);
        HColumnDescriptor cdA = descriptor.getFamily(cfA);
        assertTrue(cdA.getKeepDeletedCells());
        assertEquals(DataBlockEncoding.NONE, cdA.getDataBlockEncoding()); // Overridden using WITH
        assertEquals(1, cdA.getMaxVersions()); // Overridden using WITH
        HColumnDescriptor cdB = descriptor.getFamily(cfB);
        assertFalse(cdB.getKeepDeletedCells()); // Allow KEEP_DELETED_CELLS to be false for VIEW
        assertEquals(DataBlockEncoding.NONE, cdB.getDataBlockEncoding()); // Should keep the original value.
        // CF c should stay the same since it's not a Phoenix cf.
        HColumnDescriptor cdC = descriptor.getFamily(cfC);
        assertNotNull("Column family not found", cdC);
        assertFalse(cdC.getKeepDeletedCells());
        assertFalse(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING == cdC.getDataBlockEncoding());
        assertTrue(descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName()));
        assertTrue(descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName()));
        assertTrue(descriptor.hasCoprocessor(ServerCachingEndpointImpl.class.getName()));
        admin.close();
        
        int rowCount = 5;
        String upsert = "UPSERT INTO " + tableName + "(id,col1,col2) VALUES(?,?,?)";
        PreparedStatement ps = conn1.prepareStatement(upsert);
        for (int i = 0; i < rowCount; i++) {
            ps.setString(1, Integer.toString(i));
            ps.setInt(2, i+1);
            ps.setInt(3, i+2);
            ps.execute();
        }
        conn1.commit();
        conn1.close();
       
        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
        Connection conn2 = DriverManager.getConnection(getUrl(), props);
        String query = "SELECT count(1) FROM " + tableName;
        ResultSet rs = conn2.createStatement().executeQuery(query);
        // ... (row-count assertions elided in this excerpt)

        conn2.close();
    }
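
Both of these tests pin each connection to an explicit timestamp through the CurrentSCN property, so all reads and writes happen "as of" that point in time. The pattern in isolation (the timestamp value is a placeholder; the tests derive theirs from nextTimestamp()):

        Properties props = new Properties();
        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(1000L)); // placeholder SCN
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost", props);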
   
    @Test
    public void testCreateViewOnExistingTable() throws Exception {
        PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
        String tableName = MDTEST_NAME;
        String schemaName = MDTEST_SCHEMA_NAME;
        byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
        byte[] cfC = Bytes.toBytes("c");
        byte[][] familyNames = new byte[][] {cfB, cfC};
        byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
        HBaseAdmin admin = pconn.getQueryServices().getAdmin();
        try {
            admin.disableTable(htableName);
            admin.deleteTable(htableName);
            admin.enableTable(htableName); // throws TableNotFoundException once the delete succeeds
        } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
            // ignore: the table did not exist yet
        }

        HTableDescriptor descriptor = new HTableDescriptor(htableName);
        for (byte[] familyName : familyNames) {
            HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
            descriptor.addFamily(columnDescriptor);
        }
        admin.createTable(descriptor);
        admin.close(); // close only after the last admin call (was previously closed before createTable)
           
        long ts = nextTimestamp();
        Properties props = new Properties();
        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
        Connection conn1 = DriverManager.getConnection(getUrl(), props);
        String createStmt = "create view bogusTable" +
        "   (id char(1) not null primary key,\n" +
        "    a.col1 integer,\n" +
        "    d.col2 bigint)\n";
        try {
            conn1.createStatement().execute(createStmt);
            fail();
        } catch (TableNotFoundException e) {
            // expected to fail b/c table doesn't exist
        }
        createStmt = "create view " + MDTEST_NAME +
                "   (id char(1) not null primary key,\n" +
                "    a.col1 integer,\n" +
                "    b.col2 bigint)\n";
        try {
            conn1.createStatement().execute(createStmt);
            fail();
        } catch (ReadOnlyTableException e) {
            // expected to fail b/c cf a doesn't exist
        }
        createStmt = "create view " + MDTEST_NAME +
        "   (id char(1) not null primary key,\n" +
        "    b.col1 integer,\n" +
        "    c.col2 bigint)\n";
        try {
            conn1.createStatement().execute(createStmt);
            fail();
        } catch (ReadOnlyTableException e) {
            // expected to fail b/c unquoted cf c is normalized to uppercase C, which doesn't exist
        }

        createStmt = "create view " + MDTEST_NAME +
        "   (id char(1) not null primary key,\n" +
        "    b.col1 integer,\n" +
        "    \"c\".col2 bigint) \n";
        // should be ok now
        conn1.createStatement().execute(createStmt);
        conn1.close();
                
        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
        PhoenixConnection conn2 = DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class);
       
        ResultSet rs = conn2.getMetaData().getTables(null, null, MDTEST_NAME, null);
        assertTrue(rs.next());
        assertEquals(ViewType.MAPPED.name(), rs.getString(PhoenixDatabaseMetaData.VIEW_TYPE));
        assertFalse(rs.next());

        String deleteStmt = "DELETE FROM " + MDTEST_NAME;
        PreparedStatement ps = conn2.prepareStatement(deleteStmt);
        try {
            ps.execute();
            fail();
        } catch (ReadOnlyTableException e) {
            // expected to fail b/c table is read-only
        }
        try {
            String upsert = "UPSERT INTO " + MDTEST_NAME + "(id,col1,col2) VALUES(?,?,?)";
            ps = conn2.prepareStatement(upsert);
            try {
                ps.setString(1, Integer.toString(0));
                ps.setInt(2, 1);
                ps.setInt(3, 2);
                ps.execute();
                fail();
            } catch (ReadOnlyTableException e) {
                // expected to fail b/c table is read-only
            }
            conn2.createStatement().execute("ALTER VIEW " + MDTEST_NAME + " SET IMMUTABLE_ROWS=TRUE");
           
            HTableInterface htable = conn2.getQueryServices().getTable(SchemaUtil.getTableNameAsBytes(MDTEST_SCHEMA_NAME,MDTEST_NAME));
            Put put = new Put(Bytes.toBytes("0"));
            put.add(cfB, Bytes.toBytes("COL1"), ts+6, PDataType.INTEGER.toBytes(1));
            put.add(cfC, Bytes.toBytes("COL2"), ts+6, PDataType.LONG.toBytes(2));
            htable.put(put);
            conn2.close();
           
            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 10));
            Connection conn7 = DriverManager.getConnection(getUrl(), props);
            // Should be ok b/c we've marked the view with IMMUTABLE_ROWS=true
            conn7.createStatement().execute("CREATE INDEX idx ON " + MDTEST_NAME + "(B.COL1)");
            // ... (remainder of the test elided in this excerpt)
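
The final DDL above succeeds because Phoenix upper-cases unquoted identifiers, so the lowercase HBase column family c must be quoted to match exactly. A condensed sketch of the mapped-view pattern (the table name is a placeholder):

        // Hypothetical mapped view over an existing HBase table; quoting "c"
        // preserves the lowercase column family name.
        conn.createStatement().execute(
                "CREATE VIEW EXISTING_TABLE (" +
                " id CHAR(1) NOT NULL PRIMARY KEY," +
                " b.col1 INTEGER," +
                " \"c\".col2 BIGINT)");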

    // (tail of a string-constant declaration, shown at the top of this excerpt)
            + CSV_VALUES_BAD_ENCAPSULATED_CONTROL_CHARS;

    @Test
    public void testCSVCommonsUpsert() throws Exception {
        CSVParser parser = null;
        PhoenixConnection conn = null;
        try {

            // Create table
            String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                    + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
            conn = DriverManager.getConnection(getUrl()).unwrap(
                    PhoenixConnection.class);
            PhoenixRuntime.executeStatements(conn,
                    new StringReader(statements), null);

            // Upsert CSV file
            CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                    Collections.<String> emptyList(), true);
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_HEADER));

            // Compare Phoenix ResultSet with CSV file content
            PreparedStatement statement = conn
                    .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                            + STOCK_TABLE);
            ResultSet phoenixResultSet = statement.executeQuery();
            parser = new CSVParser(new StringReader(
                    STOCK_CSV_VALUES_WITH_HEADER), csvUtil.getFormat());
            for (CSVRecord record : parser) {
                assertTrue(phoenixResultSet.next());
                int i = 0;
                for (String value : record) {
                    assertEquals(value, phoenixResultSet.getString(i + 1));
                    i++;
                }
            }
            assertFalse(phoenixResultSet.next());
        } finally {
            if (parser != null)
                parser.close();
            if (conn != null)
                conn.close();
        }
    }
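
Outside a test harness, the same loader takes only a few lines. A minimal sketch assuming just the constructor and upsert(Reader) signatures used above and a target table that already exists; the URL, table name, and CSV payload are placeholders:

        // Placeholders throughout; signatures follow the test above.
        PhoenixConnection conn = DriverManager.getConnection("jdbc:phoenix:localhost")
                .unwrap(PhoenixConnection.class);
        CSVCommonsLoader loader = new CSVCommonsLoader(conn, "STOCKS",
                Collections.<String> emptyList(), true);
        loader.upsert(new StringReader("SYMBOL,COMPANY\nAAPL,Apple Inc."));
        conn.close();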

    }

    @Test
    public void testTDVCommonsUpsert() throws Exception {
        CSVParser parser = null;
        PhoenixConnection conn = null;
        try {

            // Create table
            String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                    + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
            conn = DriverManager.getConnection(getUrl()).unwrap(
                    PhoenixConnection.class);
            PhoenixRuntime.executeStatements(conn,
                    new StringReader(statements), null);

            // Upsert TDV file
            CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                    Collections.<String> emptyList(), true, '\t', '"', null,
                    CSVCommonsLoader.DEFAULT_ARRAY_ELEMENT_SEPARATOR);
            csvUtil.upsert(new StringReader(STOCK_TDV_VALUES_WITH_HEADER));

            // Compare Phoenix ResultSet with TDV file content
            PreparedStatement statement = conn
                    .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                            + STOCK_TABLE);
            ResultSet phoenixResultSet = statement.executeQuery();
            parser = new CSVParser(new StringReader(
                    STOCK_TDV_VALUES_WITH_HEADER), csvUtil.getFormat());
            for (CSVRecord record : parser) {
                assertTrue(phoenixResultSet.next());
                int i = 0;
                for (String value : record) {
                    assertEquals(value, phoenixResultSet.getString(i + 1));
                    i++;
                }
            }
            assertFalse(phoenixResultSet.next());
        } finally {
            if (parser != null)
                parser.close();
            if (conn != null)
                conn.close();
        }
    }

    }

    @Test
    public void testCSVUpsertWithCustomDelimiters() throws Exception {
        CSVParser parser = null;
        PhoenixConnection conn = null;
        try {
            // Create table
            String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                    + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
            conn = DriverManager.getConnection(getUrl()).unwrap(
                    PhoenixConnection.class);
            PhoenixRuntime.executeStatements(conn,
                    new StringReader(statements), null);

            // Upsert CSV file, using deliberately unusual delimiter ('1'),
            // quote ('2'), and escape ('3') characters
            CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                    Arrays.<String> asList(STOCK_COLUMNS), true,
                    '1', '2', '3', CSVCommonsLoader.DEFAULT_ARRAY_ELEMENT_SEPARATOR);
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_DELIMITER));

            // Compare Phoenix ResultSet with CSV file content
            PreparedStatement statement = conn
                    .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                            + STOCK_TABLE);
            ResultSet phoenixResultSet = statement.executeQuery();
            parser = new CSVParser(new StringReader(
                    STOCK_CSV_VALUES_WITH_DELIMITER), csvUtil.getFormat());
            for (CSVRecord record : parser) {
                assertTrue(phoenixResultSet.next());
                int i = 0;
                for (String value : record) {
                    assertEquals(value, phoenixResultSet.getString(i + 1));
                    i++;
                }
            }
            assertFalse(phoenixResultSet.next());
        } finally {
            if (parser != null)
                parser.close();
            if (conn != null)
                conn.close();
        }
    }
