Storm all 分组

all分组
        使用广播分组。
        builder.setBolt("split-bolt", new SplitBolt(),2).allGrouping("wcSpout").setNumTasks(2);

package com.mao.storm.group.all;

import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.IRichSpout;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;


public class WordCountSpout implements IRichSpout {

    private TopologyContext context;
    private SpoutOutputCollector collector;
    List<String> stats;
    private int index = 0;
    public void open(Map map, TopologyContext context, SpoutOutputCollector collector) {
        //Util.sendToClient(this,"open()");
        this.collector = collector;
        this.context = context;
        stats = new ArrayList<String>();
        stats.add("hollo world tom");
        stats.add("hollo world tom1");
        stats.add("hollo world tom2");
        stats.add("hollo world tom3");
    }
    private Random r = new Random();

    public void close() {

    }

    public void activate() {

    }

    public void deactivate() {

    }

    public void nextTuple() {
        if (index<3){
            String line = stats.get(r.nextInt(4));
            System.out.println(this+" : nextTuple() : "+line+" : "+index);
            collector.emit(new Values(line),index);
            index++;
        }
    }

    /**
     * 回调处理
     */
    public void ack(Object msgId) {
        System.out.println(this+" : ack() : "+msgId);
    }

    public void fail(Object o) {

    }

    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("line"));
    }

    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
package com.mao.storm.group.all;

import com.mao.storm.util.Util;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.IRichBolt;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

import java.util.Map;

public class SplitBolt implements IRichBolt {

    private TopologyContext context;
    private OutputCollector collector;

    public void prepare(Map map, TopologyContext context, OutputCollector collector) {
        this.context = context;
        this.collector = collector;
    }

    public void execute(Tuple tuple) {
        String line = tuple.getString(0);
        Util.sendToLocalhost(this,line);
        String[] arr = line.split(" ");
        for (String s : arr){
            collector.emit(new Values(s,1));
        }
    }

    public void cleanup() {

    }

    public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
        outputFieldsDeclarer.declare(new Fields("word","count"));
    }

    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
package com.mao.storm.group.all;


import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.topology.TopologyBuilder;

/**
 * App
 */
public class App {

    public static void main(String[] args) throws Exception {

        TopologyBuilder builder = new TopologyBuilder();

        //设置spout
        builder.setSpout("wcSpout",new WordCountSpout()).setNumTasks(2);
        //设置creator-Bolt
        builder.setBolt("split-bolt",new SplitBolt(),2).allGrouping("wcSpout").setNumTasks(2);

        Config config = new Config();
        config.setNumWorkers(2);
        config.setDebug(true);

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("wc",config,builder.createTopology());
        System.out.println("over");
        Thread.sleep(20000);
        cluster.shutdown();
    }
}

猜你喜欢

转载自blog.csdn.net/mao502010435/article/details/89631257
今日推荐