Package org.apache.falcon.entity.v0.feed

Usage examples of org.apache.falcon.entity.v0.feed.Feed, collected from Apache Falcon test code.
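The excerpts below show a Feed being unmarshalled from XML, copied, compared for updates, and published to the configuration store. As a brief orientation, here is a minimal sketch of the two ways the excerpts obtain a Feed; the class name FeedExamples and the method names are illustrative, everything else mirrors calls that appear in the excerpts.

import java.io.InputStream;

import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.entity.v0.feed.Cluster;
import org.apache.falcon.entity.v0.feed.Clusters;
import org.apache.falcon.entity.v0.feed.Feed;

public final class FeedExamples {

    // Load a feed from its XML definition, as the tests do for hive-table-feed.xml.
    static Feed loadFeed(InputStream feedXml) throws Exception {
        return (Feed) EntityType.FEED.getUnmarshaller().unmarshal(feedXml);
    }

    // Build a feed programmatically: name, hourly frequency and a single cluster.
    static Feed buildFeed(String name, String clusterName) {
        Feed feed = new Feed();
        feed.setName(name);
        feed.setFrequency(Frequency.fromString("hours(1)"));

        Clusters clusters = new Clusters();
        Cluster feedCluster = new Cluster();
        feedCluster.setName(clusterName);
        clusters.getClusters().add(feedCluster);
        feed.setClusters(clusters);
        return feed;
    }
}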


        Thread.sleep(1000);
    }

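    // Publishes a table-backed feed and resolves its replication staging directory on the source cluster.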
    private void initializeStagingDirs() throws Exception {
        final InputStream inputStream = getClass().getResourceAsStream("/config/feed/hive-table-feed.xml");
        Feed tableFeed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(inputStream);
        getStore().publish(EntityType.FEED, tableFeed);

        final Cluster srcCluster = dfsCluster.getCluster();
        final CatalogStorage sourceStorage = (CatalogStorage) FeedHelper.createStorage(srcCluster, tableFeed);
        String sourceStagingDir = FeedHelper.getStagingDir(srcCluster, tableFeed, sourceStorage, Tag.REPLICATION);


        Assert.assertTrue(UpdateHelper.isWorkflowUpdated(cluster, process));
    }

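    // isEntityUpdated: changing groups or the late-arrival cut-off does not mark the feed as updated; changing its frequency does.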
    @Test
    public void testShouldUpdateProcess() throws Exception {
        Feed oldFeed = parser.parseAndValidate(this.getClass()
                .getResourceAsStream(FEED_XML));
        String cluster = "testCluster";
        Feed newFeed = (Feed) oldFeed.copy();
        Assert.assertFalse(UpdateHelper.isEntityUpdated(oldFeed, newFeed, cluster));

        newFeed.setGroups("newgroups");
        Assert.assertFalse(UpdateHelper.isEntityUpdated(oldFeed, newFeed, cluster));
        newFeed.getLateArrival().setCutOff(Frequency.fromString("hours(8)"));
        Assert.assertFalse(UpdateHelper.isEntityUpdated(oldFeed, newFeed, cluster));
        newFeed.setFrequency(Frequency.fromString("days(1)"));
        Assert.assertTrue(UpdateHelper.isEntityUpdated(oldFeed, newFeed, cluster));

        Process oldProcess = processParser.parseAndValidate(this.getClass().
                getResourceAsStream(PROCESS_XML));
        prepare(oldProcess);

        Assert.assertTrue(UpdateHelper.isEntityUpdated(oldProcess, newProcess, cluster));
    }

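    // shouldUpdate: a dependent process needs an update only for feed changes it can see, such as the data path or the frequency.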
    @Test
    public void testShouldUpdateFeed() throws Exception {
        Feed oldFeed = parser.parseAndValidate(this.getClass().getResourceAsStream(FEED_XML));

        Feed newFeed = (Feed) oldFeed.copy();
        Process process = processParser.parseAndValidate(this.getClass().getResourceAsStream(PROCESS_XML));
        prepare(process);
        String cluster = process.getClusters().getClusters().get(0).getName();

        Assert.assertFalse(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));

        newFeed.getLateArrival().setCutOff(Frequency.fromString("hours(1)"));
        Assert.assertFalse(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));

        newFeed.getLateArrival().setCutOff(oldFeed.getLateArrival().getCutOff());
        getLocation(newFeed, LocationType.DATA, cluster).setPath("/test");
        Assert.assertTrue(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));

        getLocation(newFeed, LocationType.DATA, cluster).setPath(
                getLocation(oldFeed, LocationType.DATA, cluster).getPath());
        newFeed.setFrequency(Frequency.fromString("months(1)"));
        Assert.assertTrue(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));

        newFeed.setFrequency(oldFeed.getFrequency());
        Partition partition = new Partition();
        partition.setName("1");
        newFeed.getPartitions().getPartitions().add(partition);
        Assert.assertFalse(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));

        Property property = new Property();
        property.setName("1");
        property.setValue("1");
        newFeed.setProperties(new Properties());
        newFeed.getProperties().getProperties().add(property);
        Assert.assertFalse(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));

        newFeed.getProperties().getProperties().remove(0);
        Assert.assertFalse(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));

        FeedHelper.getCluster(newFeed, process.getClusters().getClusters().get(0).getName()).getValidity().setStart(
                SchemaHelper.parseDateUTC("2012-11-01T00:00Z"));
        Assert.assertFalse(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));

        FeedHelper.getCluster(newFeed, process.getClusters().getClusters().get(0).getName()).getValidity().
                setStart(FeedHelper.getCluster(oldFeed,
                        process.getClusters().getClusters().get(0).getName()).getValidity().getStart());

        // Switch the feed's storage from file-system locations to a catalog table; this does not flag the process for update here
        newFeed.setLocations(null);
        CatalogTable table = new CatalogTable();
        table.setUri("catalog:default:clicks-blah#ds=${YEAR}-${MONTH}-${DAY}-${HOUR}");
        newFeed.setTable(table);
        Assert.assertFalse(UpdateHelper.shouldUpdate(oldFeed, newFeed, process, cluster));
    }

    }

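    // Same checks for a catalog (Hive table) backed feed: a frequency or table URI change marks the entity as updated.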
    @Test
    public void testShouldUpdateTable() throws Exception {
        InputStream inputStream = getClass().getResourceAsStream("/config/feed/hive-table-feed.xml");
        Feed oldTableFeed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(inputStream);
        getStore().publish(EntityType.FEED, oldTableFeed);

        String cluster = "testCluster";
        Feed newTableFeed = (Feed) oldTableFeed.copy();
        Assert.assertFalse(UpdateHelper.isEntityUpdated(oldTableFeed, newTableFeed, cluster));

        newTableFeed.setGroups("newgroups");
        Assert.assertFalse(UpdateHelper.isEntityUpdated(oldTableFeed, newTableFeed, cluster));
        newTableFeed.setFrequency(Frequency.fromString("days(1)"));
        Assert.assertTrue(UpdateHelper.isEntityUpdated(oldTableFeed, newTableFeed, cluster));

        final CatalogTable table = new CatalogTable();
        table.setUri("catalog:default:clicks-blah#ds=${YEAR}-${MONTH}-${DAY}-${HOUR}");
        newTableFeed.setTable(table);
        Assert.assertTrue(UpdateHelper.isEntityUpdated(oldTableFeed, newTableFeed, cluster));

        inputStream = getClass().getResourceAsStream("/config/process/process-table.xml");
        Process oldProcess = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(inputStream);
        FileSystem fs = dfsCluster.getFileSystem();

        Assert.assertEquals(getEdgesCount(service.getGraph()), 2); // +2 = cluster to colo and tag
    }

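    // Publishing feeds should add the expected feed, tag, group and user vertices and edges to the relationship graph.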
    @Test (dependsOnMethods = "testOnAddClusterEntity")
    public void testOnAddFeedEntity() throws Exception {
        Feed impressionsFeed = buildFeed("impression-feed", clusterEntity, "classified-as=Secure", "analytics",
                Storage.TYPE.FILESYSTEM, "/falcon/impression-feed/${YEAR}${MONTH}${DAY}");
        configStore.publish(EntityType.FEED, impressionsFeed);
        inputFeeds.add(impressionsFeed);
        verifyEntityWasAddedToGraph(impressionsFeed.getName(), RelationshipType.FEED_ENTITY);
        verifyFeedEntityEdges(impressionsFeed.getName());
        Assert.assertEquals(getVerticesCount(service.getGraph()), 7); // +4 = feed, tag, group, user
        Assert.assertEquals(getEdgesCount(service.getGraph()), 6); // +4 = cluster, tag, group, user

        Feed clicksFeed = buildFeed("clicks-feed", clusterEntity, "classified-as=Secure,classified-as=Financial",
                "analytics", Storage.TYPE.FILESYSTEM, "/falcon/clicks-feed/${YEAR}${MONTH}${DAY}");
        configStore.publish(EntityType.FEED, clicksFeed);
        inputFeeds.add(clicksFeed);
        verifyEntityWasAddedToGraph(clicksFeed.getName(), RelationshipType.FEED_ENTITY);
        Assert.assertEquals(getVerticesCount(service.getGraph()), 9); // +2 = feed + Financial tag
        Assert.assertEquals(getEdgesCount(service.getGraph()), 11); // +5 = cluster + user + 2 groups + tag

        Feed join1Feed = buildFeed("imp-click-join1", clusterEntity, "classified-as=Financial", "reporting,bi",
                Storage.TYPE.FILESYSTEM, "/falcon/imp-click-join1/${YEAR}${MONTH}${DAY}");
        configStore.publish(EntityType.FEED, join1Feed);
        outputFeeds.add(join1Feed);
        verifyEntityWasAddedToGraph(join1Feed.getName(), RelationshipType.FEED_ENTITY);
        Assert.assertEquals(getVerticesCount(service.getGraph()), 12); // + 3 = 1 feed and 2 groups
        Assert.assertEquals(getEdgesCount(service.getGraph()), 16); // +5 = cluster + user + group + 2 tags

        Feed join2Feed = buildFeed("imp-click-join2", clusterEntity, "classified-as=Secure,classified-as=Financial",
                "reporting,bi", Storage.TYPE.FILESYSTEM, "/falcon/imp-click-join2/${YEAR}${MONTH}${DAY}");
        configStore.publish(EntityType.FEED, join2Feed);
        outputFeeds.add(join2Feed);
        verifyEntityWasAddedToGraph(join2Feed.getName(), RelationshipType.FEED_ENTITY);

        Assert.assertEquals(getVerticesCount(service.getGraph()), 13); // +1 = feed
        Assert.assertEquals(getEdgesCount(service.getGraph()), 22); // +6 = cluster + user + 2 tags + 2 groups
    }

        Assert.assertEquals(getEdgesCount(service.getGraph()), 63);
    }

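    // Updating a feed with new tags, a new group, a new storage path and an extra cluster through the config store.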
    @Test(dependsOnMethods = "testOnChange")
    public void testOnFeedEntityChange() throws Exception {
        Feed oldFeed = inputFeeds.get(0);
        Feed newFeed = buildFeed(oldFeed.getName(), clusterEntity,
                "classified-as=Secured,source=data-warehouse", "reporting",
                Storage.TYPE.FILESYSTEM, "jail://global:00/falcon/impression-feed/20140101");

        try {
            configStore.initiateUpdate(newFeed);

            // add cluster
            org.apache.falcon.entity.v0.feed.Cluster feedCluster =
                    new org.apache.falcon.entity.v0.feed.Cluster();
            feedCluster.setName(bcpCluster.getName());
            newFeed.getClusters().getClusters().add(feedCluster);

            configStore.update(EntityType.FEED, newFeed);
        } finally {
            configStore.cleanupUpdateInit();
        }

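    // Re-parsing a feed that already has one dependent process registered in the configuration store.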
    @Test
    public void testFeedUpdateWithOneDependentProcess() {
        try {
            ConfigurationStore.get().remove(EntityType.FEED, "clicks");
            ConfigurationStore.get().remove(EntityType.PROCESS, "sample");
            Feed feed = parser.parseAndValidate(this.getClass()
                    .getResourceAsStream(FEED_XML));
            ConfigurationStore.get().publish(EntityType.FEED, feed);
            storeEntity(EntityType.PROCESS, "sample");

            //Try parsing the same feed xml

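        // Variant of the scenario above with three dependent processes (sample, sample2, sample3).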
        try {
            ConfigurationStore.get().remove(EntityType.FEED, "clicks");
            ConfigurationStore.get().remove(EntityType.PROCESS, "sample");
            ConfigurationStore.get().remove(EntityType.PROCESS, "sample2");
            ConfigurationStore.get().remove(EntityType.PROCESS, "sample3");
            Feed feed = parser.parseAndValidate(this.getClass()
                    .getResourceAsStream(FEED_XML));
            ConfigurationStore.get().publish(EntityType.FEED, feed);
            storeEntity(EntityType.PROCESS, "sample");
            storeEntity(EntityType.PROCESS, "sample2");
            storeEntity(EntityType.PROCESS, "sample3");

        return cluster;
    }

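    // Assembles a minimal Feed entity: name, tags, groups, an hourly frequency and a single cluster.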
    private static Feed buildFeed(String feedName, Cluster cluster, String tags, String groups,
                                  Storage.TYPE storageType, String uriTemplate) {
        Feed feed = new Feed();
        feed.setName(feedName);
        feed.setTags(tags);
        feed.setGroups(groups);
        feed.setFrequency(Frequency.fromString("hours(1)"));

        org.apache.falcon.entity.v0.feed.Clusters
                clusters = new org.apache.falcon.entity.v0.feed.Clusters();
        feed.setClusters(clusters);
        org.apache.falcon.entity.v0.feed.Cluster feedCluster =
                new org.apache.falcon.entity.v0.feed.Cluster();
        feedCluster.setName(cluster.getName());
        clusters.getClusters().add(feedCluster);

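        // Test setup: re-registers the feeds the process depends on, publishes the parsed feed and creates the workflow directory on HDFS.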
        ConfigurationStore.get().remove(EntityType.PROCESS, "sample");
        ConfigurationStore.get().remove(EntityType.PROCESS, "sample2");
        storeEntity(EntityType.FEED, "impressionFeed");
        storeEntity(EntityType.FEED, "imp-click-join1");
        storeEntity(EntityType.FEED, "imp-click-join2");
        Feed feed = parser.parseAndValidate(this.getClass()
                .getResourceAsStream(FEED_XML));
        ConfigurationStore.get().publish(EntityType.FEED, feed);

        dfsCluster.getFileSystem().mkdirs(new Path("/falcon/test/workflow"));
        Process process = processParser.parseAndValidate(this.getClass()
