Package: com.mongodb.hadoop.testutils

Examples of com.mongodb.hadoop.testutils.MapReduceJob


    @Test
    public void testBasicInputSource() {
        LOG.info("testing basic input source");
        LOG.info("WHAT?");
        new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
            .jar(JOBJAR_PATH)
            .param("mongo.input.notimeout", "true")
            .inputUris(getInputUri())
            .outputUris(getOutputUri())
            .execute(isRunTestInVm());
View Full Code Here



    @Test
    public void testTreasuryJsonConfig() {
        mongoImport("yield_historical.in3", TREASURY_JSON_PATH);
        new MapReduceJob("com.mongodb.hadoop.examples.treasury.TreasuryYieldXMLConfig")
            .jar(JOBJAR_PATH)
            .param(MONGO_SPLITTER_CLASS, MultiMongoCollectionSplitter.class.getName())
            .param(MULTI_COLLECTION_CONF_KEY, collectionSettings().toString())
            .outputUris(getOutputUri())
            .execute(isRunTestInVm());
View Full Code Here

    @Test
    public void testMultipleCollectionSupport() {
        mongoImport(getInputUri().getCollection(), TREASURY_JSON_PATH);
        mongoImport(inputUri2.getCollection(), TREASURY_JSON_PATH);
        new MapReduceJob("com.mongodb.hadoop.examples.treasury.TreasuryYieldXMLConfig")
            .jar(JOBJAR_PATH)
            .param(MONGO_SPLITTER_CLASS, MultiMongoCollectionSplitter.class.getName())
            .inputUris(getInputUri(), inputUri2)
            .outputUris(getOutputUri())
            .execute(isRunTestInVm());
View Full Code Here

public class TestSharded extends BaseShardedTest {

    @Test
    public void testBasicInputSource() {
        new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
            .jar(JOBJAR_PATH)
            .inputUris(getInputUri())
            .outputUris(getOutputUri())
            .execute(isRunTestInVm());
        compareResults(getMongos().getDB("mongo_hadoop").getCollection("yield_historical.out"), getReference());
View Full Code Here

    }

    @Test
    public void testMultiMongos() {
        MongoClientURI outputUri = getOutputUri();
        new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
            .jar(JOBJAR_PATH)
            .param(INPUT_MONGOS_HOSTS, "localhost:27017 localhost:27018")
            .inputUris(getInputUri())
            .outputUris(outputUri)
            .execute(isRunTestInVm());
View Full Code Here

    public void testMultiOutputs() {
        DBObject opCounterBefore1 = (DBObject) getMongos().getDB("admin").command("serverStatus").get("opcounters");
        DBObject opCounterBefore2 = (DBObject) getMongos2().getDB("admin").command("serverStatus").get("opcounters");
        MongoClientURI outputUri = getOutputUri();

        new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
            .jar(JOBJAR_PATH)
            .inputUris(getInputUri())
            .outputUris(outputUri, new MongoClientURIBuilder(outputUri).port(27018).build())
            .execute(isRunTestInVm());
View Full Code Here

    @Test
    public void testRangeQueries() {
        DBCollection collection = getMongos().getDB(getOutputUri().getDatabase()).getCollection(getOutputUri().getCollection());
        collection.drop();

        MapReduceJob job = new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
                               .jar(JOBJAR_PATH)
                               .inputUris(getInputUri())
                               .outputUris(getOutputUri())
                               .param(SPLITS_USE_RANGEQUERY, "true");
        job.execute(isRunTestInVm());

        compareResults(collection, getReference());
        collection.drop();

        job.param(INPUT_QUERY, "{\"_id\":{\"$gt\":{\"$date\":1182470400000}}}").execute(isRunTestInVm());
        // Make sure that this fails when rangequery is used with a query that conflicts
        assertFalse("This collection shouldn't exist because of the failure",
                    getMongos().getDB("mongo_hadoop").getCollectionNames().contains("yield_historical.out"));
    }
View Full Code Here

        DBCollection destination = getShard2().getDB("mongo_hadoop").getCollection("yield_historical.in");
        for (DBObject doc : data) {
            destination.insert(doc, WriteConcern.UNACKNOWLEDGED);
        }

        new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
            .jar(JOBJAR_PATH)
            .param(SPLITS_SLAVE_OK, "true")
            .param(SPLITS_USE_SHARDS, "true")
            .param(SPLITS_USE_CHUNKS, "false")
            .inputUris(new MongoClientURIBuilder(getInputUri()).readPreference(ReadPreference.secondary()).build())
            .execute(isRunTestInVm());

        compareResults(collection, getReference());
        collection.drop();

        new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
            .jar(JOBJAR_PATH)
            .inputUris(new MongoClientURIBuilder(getInputUri()).readPreference(ReadPreference.secondary()).build())
            .param(SPLITS_SLAVE_OK, "true")
            .param(SPLITS_USE_SHARDS, "true")
            .param(SPLITS_USE_CHUNKS, "true")
View Full Code Here

    @Test
    public void testShardedClusterWithGtLtQueryFormats() {
        DBCollection collection = getMongos().getDB("mongo_hadoop").getCollection("yield_historical.out");
        collection.drop();

        MapReduceJob job = new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
                               .jar(JOBJAR_PATH)
                               .inputUris(new MongoClientURIBuilder(getInputUri()).readPreference(ReadPreference.secondary()).build())
                               .outputUris(getOutputUri())
                               .param(SPLITS_USE_RANGEQUERY, "true");
        job.execute(isRunTestInVm());

        compareResults(collection, getReference());
        collection.drop();

        job.param(INPUT_QUERY, "{\"_id\":{\"$gt\":{\"$date\":1182470400000}}}")
           .inputUris(getInputUri())
           .execute(isRunTestInVm());
        // Make sure that this fails when rangequery is used with a query that conflicts
        assertEquals(collection.count(), 0);
    }
View Full Code Here

        MongoClient mongoClient = new MongoClient(uri);
        DBCollection collection = mongoClient.getDB(uri.getDatabase())
                                             .getCollection(uri.getCollection());

        final URI outputUri = new URI(uri.getURI());
        MapReduceJob job = new MapReduceJob(BookstoreConfig.class.getName())
                               .jar(JAR_PATH)
                               .inputUris(INVENTORY_BSON)
                               .outputUris(outputUri)
                               .param("mapred.input.dir", INVENTORY_BSON.toString());
        if (!CLUSTER_VERSION.startsWith("1.")) {
            job.inputFormat(BSONFileInputFormat.class);
        } else {
            job.mapredInputFormat(com.mongodb.hadoop.mapred.BSONFileInputFormat.class);
            job.mapredOutputFormat(MongoOutputFormat.class);
        }


        job.execute(false);

        DBObject object = collection.findOne(new BasicDBObject("_id", "history"));

        assertNotNull(object);
        List books = (List) object.get("books");
View Full Code Here

TOP

Related Classes of com.mongodb.hadoop.testutils.MapReduceJob

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Oracle Corporation (originally developed by Sun Microsystems, Inc.). Contact coftware#gmail.com.