Package storm.trident

Examples of storm.trident.TridentTopology


        System.out.println("OK");
    }


    private static StormTopology externalState(LocalDRPC drpc, FeederBatchSpout spout) {
        TridentTopology topology = new TridentTopology();

        // You can reference existing data sources as well.
        // Here we are mocking up a "database"
        StateFactory stateFactory = new StateFactory() {
            @Override
            public State makeState(Map conf, IMetricsContext metrics, int partitionIndex, int numPartitions) {
                MemoryMapState<Integer> name_to_age = new MemoryMapState<Integer>("name_to_age");
                // This is a bit hard to read but it's just pre-populating the state
                List<List<Object>> keys = getKeys("ted", "mary", "jason", "tom", "chuck");
                name_to_age.multiPut(keys, ImmutableList.of(32, 21, 45, 52, 18));
                return name_to_age;
            }
        };
        TridentState nameToAge =
                topology.newStaticState(stateFactory);

        // Let's setup another state that keeps track of actor's appearance counts per location
        TridentState countState =
                topology
                        .newStream("spout", spout)
                        .groupBy(new Fields("actor","location"))
                        .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));

        // Now, let's calculate the average age of actors seen
        topology
                .newDRPCStream("age_stats", drpc)
                .stateQuery(countState, new TupleCollectionGet(), new Fields("actor", "location"))
                .stateQuery(nameToAge, new Fields("actor"), new MapGet(), new Fields("age"))
                .each(new Fields("actor","location","age"), new Print())
                .groupBy(new Fields("location"))
                .chainedAgg()
                .aggregate(new Count(), new Fields("count"))
                .aggregate(new Fields("age"), new Sum(), new Fields("sum"))
                .chainEnd()
                .each(new Fields("sum", "count"), new DivideAsDouble(), new Fields("avg"))
                .project(new Fields("location", "count", "avg"))
        ;

        return topology.build();
    }
View Full Code Here


        KafkaConfig kafkaConf = new KafkaConfig(KafkaConfig.StaticHosts.fromHostString(hosts, 1),KAFKA_TOPIC );
        kafkaConf.scheme = new StringScheme();
       
        System.out.println(""+kafkaConf);
   
    TridentTopology tridentTopology = new TridentTopology();
   
 
    tridentTopology.newStream(IMPRESSION_SPOUT, new OpaqueTridentKafkaSpout(kafkaConf))
            .each(new Fields("str"), new ImpressionKeyValueSplitFuncion(), new Fields("key", "raw"))
            .parallelismHint(3)
            .partitionPersist(new HBaseFactory(),
            new Fields("key", "raw"), new TridentHBaseUpdater());
           
           
    System.out.println("About to start the cluster");
       
    if (args.length == 0) {
      System.out.println("Working in Local Cluster Mode");
      LocalCluster cluster = new LocalCluster();
      cluster.submitTopology("test", stormconfig, tridentTopology.build());
      System.out.println("Finished submitting local closter");
    } else {
      System.out.println("Starting storm submitter topology");
      System.out.println(" [====] "+tridentTopology);
      stormconfig.setNumWorkers(3);
      StormSubmitter.submitTopology(args[0], stormconfig, tridentTopology.build());
      System.out.println("Finished submitting the topology");
    }
  }
View Full Code Here

import storm.trident.operation.builtin.Debug;


// Smoke-test driver: reads string messages from a local Kafka broker through
// a transactional Trident spout and prints each tuple via the Debug filter.
public class Tester {
    public static void main(String[] args) throws Exception {
        TridentTopology topology = new TridentTopology();
        // NOTE(review): assumes a Kafka broker on localhost:9092 exposing
        // topic "test" across 3 partitions -- confirm against local setup.
        List<String> hosts = Arrays.asList("localhost:9092");
        KafkaConfig kafkaConf = new KafkaConfig(KafkaConfig.StaticHosts.fromHostString(hosts, 3), "test");
        kafkaConf.scheme = new StringScheme();
        // Print every incoming tuple; the commented-out aggregate would count
        // tuples per batch instead of echoing them.
        topology.newStream("mykafka", new TransactionalTridentKafkaSpout(kafkaConf))
//                .aggregate(new Count(), new Fields("count"))
                .each(new Fields("str"), new Debug());
       
        LocalCluster cluster = new LocalCluster();
       
        StormTopology topo = topology.build();
       
        cluster.submitTopology("kafkatest", new Config(), topo);
        // Prepare an immediate-kill option set for the teardown that follows.
        KillOptions killopts = new KillOptions();
        killopts.set_wait_secs(0);
        // Let the topology run for 5 seconds before it is torn down.
        Utils.sleep(5000);
View Full Code Here

TOP

Related Classes of storm.trident.TridentTopology

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware@gmail.com.